
Commit

Merge pull request #3914 from jtratner/fix-network-using-tests
TST: Move explicit connectivity checks to decorator.
jreback committed Jun 21, 2013
2 parents 78a71b1 + 7ddb586 commit 36c1263
Showing 7 changed files with 252 additions and 197 deletions.
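In short, the commit replaces per-test connectivity boilerplate with a decorator. A minimal sketch of the pattern being removed and the one replacing it, distilled from the diffs below (test bodies elided):

    import nose
    import urllib2
    from pandas.util.testing import network, with_connectivity_check

    # Before: each network test carried its own connectivity check.
    @network
    def test_fred(self):
        try:
            pass  # hit the remote service and make assertions
        except urllib2.URLError:
            try:
                urllib2.urlopen('http://www.google.com')
            except urllib2.URLError:
                raise nose.SkipTest  # no connectivity -> skip
            else:
                raise                # connectivity is fine -> real failure

    # After: the connectivity check lives in the decorator.
    @with_connectivity_check("http://www.google.com")
    def test_fred(self):
        pass  # hit the remote service and make assertions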
5 changes: 5 additions & 0 deletions doc/source/release.rst
@@ -91,6 +91,11 @@ pandas 0.11.1
integers or floats that are in an epoch unit of ``D, s, ms, us, ns``, thanks @mtkini (:issue:`3969`)
(e.g. unix timestamps or epoch ``s``, with fractional seconds allowed) (:issue:`3540`)
- DataFrame corr method (spearman) is now cythonized.
- Improved ``network`` test decorator to catch ``IOError`` (and therefore
``URLError`` as well). Added ``with_connectivity_check`` decorator to allow
explicitly checking a website as a proxy for seeing if there is network
connectivity. Plus, new ``optional_args`` decorator factory for decorators.
(:issue:`3910`, :issue:`3914`)

**API Changes**

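As a rough illustration of the improved behavior described in the note above, a ``network``-style decorator that turns ``IOError`` into a skipped test could look like the following (a sketch only, not the actual pandas implementation; ``urllib2.URLError`` subclasses ``IOError``, so it is covered as well):

    import nose

    def network(test_func):
        """Skip the wrapped test instead of failing it when it raises IOError."""
        def wrapper(*args, **kwargs):
            try:
                return test_func(*args, **kwargs)
            except IOError:
                raise nose.SkipTest
        wrapper.__name__ = test_func.__name__
        return wrapper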
5 changes: 5 additions & 0 deletions doc/source/v0.11.1.txt
@@ -386,6 +386,11 @@ Bug Fixes
- ``read_html`` now correctly skips tests (:issue:`3741`)
- Fixed a bug where ``DataFrame.replace`` with a compiled regular expression
in the ``to_replace`` argument wasn't working (:issue:`3907`)
- Improved ``network`` test decorator to catch ``IOError`` (and therefore
``URLError`` as well). Added ``with_connectivity_check`` decorator to allow
explicitly checking a website as a proxy for seeing if there is network
connectivity. Plus, new ``optional_args`` decorator factory for decorators.
(:issue:`3910`, :issue:`3914`)

See the :ref:`full release notes
<release>` or issue tracker
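The ``optional_args`` factory mentioned in the note above is meant to let a decorator be applied either bare or with arguments. An illustrative sketch of that technique (an assumed implementation for exposition, not necessarily the one added in this commit):

    import nose
    import urllib2

    def optional_args(decorator):
        """Make ``decorator(func, *args, **kwargs)`` usable both as
        ``@decorator`` and as ``@decorator(*args, **kwargs)``."""
        def dispatcher(*args, **kwargs):
            # Bare use (@decorator): the lone positional argument is the function.
            if len(args) == 1 and not kwargs and callable(args[0]):
                return decorator(args[0])
            # Parameterized use (@decorator(...)): defer wrapping the function.
            return lambda func: decorator(func, *args, **kwargs)
        return dispatcher

    @optional_args
    def with_connectivity_check(test_func, url="http://www.google.com"):
        def wrapper(*args, **kwargs):
            try:
                return test_func(*args, **kwargs)
            except IOError:
                try:
                    urllib2.urlopen(url)
                except IOError:
                    raise nose.SkipTest  # cannot even reach ``url`` -> skip
                else:
                    raise                # connectivity is fine -> real failure
        wrapper.__name__ = test_func.__name__
        return wrapper

With something like this in place, both ``@with_connectivity_check`` and ``@with_connectivity_check("http://www.google.com")`` work, which is how the decorator is applied in the test diffs below.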
26 changes: 9 additions & 17 deletions pandas/io/tests/test_fred.py
@@ -8,7 +8,7 @@
import pandas.io.data as web
from pandas.util.testing import (network, assert_frame_equal,
assert_series_equal,
assert_almost_equal)
assert_almost_equal, with_connectivity_check)
from numpy.testing.decorators import slow

import urllib2
@@ -17,7 +17,7 @@
class TestFred(unittest.TestCase):

@slow
@network
@with_connectivity_check("http://www.google.com")
def test_fred(self):
"""
Throws an exception when DataReader can't get a 200 response from
@@ -26,22 +26,14 @@ def test_fred(self):
start = datetime(2010, 1, 1)
end = datetime(2013, 01, 27)

try:
self.assertEquals(
web.DataReader("GDP", "fred", start, end)['GDP'].tail(1),
16004.5)
self.assertEquals(
web.DataReader("GDP", "fred", start, end)['GDP'].tail(1),
16004.5)

self.assertRaises(
Exception,
lambda: web.DataReader("NON EXISTENT SERIES", 'fred',
start, end))
except urllib2.URLError:
try:
urllib2.urlopen('http://google.com')
except urllib2.URLError:
raise nose.SkipTest
else:
raise
self.assertRaises(
Exception,
lambda: web.DataReader("NON EXISTENT SERIES", 'fred',
start, end))

@slow
@network
68 changes: 12 additions & 56 deletions pandas/io/tests/test_ga.py
@@ -1,26 +1,26 @@
import os
import unittest
import nose
from datetime import datetime

import nose
import pandas as pd
import pandas.core.common as com
from pandas import DataFrame
from pandas.util.testing import network, assert_frame_equal
from pandas.util.testing import network, assert_frame_equal, with_connectivity_check
from numpy.testing.decorators import slow

try:
import httplib2
from pandas.io.ga import GAnalytics, read_ga
from pandas.io.auth import AuthenticationConfigError, reset_token_store
from pandas.io import auth
except ImportError:
raise nose.SkipTest

class TestGoogle(unittest.TestCase):

_multiprocess_can_split_ = True

def test_remove_token_store(self):
import os
try:
import pandas.io.auth as auth
from pandas.io.ga import reset_token_store
except ImportError:
raise nose.SkipTest

auth.DEFAULT_TOKEN_FILE = 'test.dat'
with open(auth.DEFAULT_TOKEN_FILE, 'w') as fh:
fh.write('test')
@@ -31,13 +31,6 @@ def test_remove_token_store(self):
@slow
@network
def test_getdata(self):
try:
import httplib2
from pandas.io.ga import GAnalytics, read_ga
from pandas.io.auth import AuthenticationConfigError
except ImportError:
raise nose.SkipTest

try:
end_date = datetime.now()
start_date = end_date - pd.offsets.Day() * 5
@@ -76,24 +69,10 @@ def test_getdata(self):

except AuthenticationConfigError:
raise nose.SkipTest
except httplib2.ServerNotFoundError:
try:
h = httplib2.Http()
response, content = h.request("http://www.google.com")
raise
except httplib2.ServerNotFoundError:
raise nose.SkipTest

@slow
@network
@with_connectivity_check("http://www.google.com")
def test_iterator(self):
try:
import httplib2
from pandas.io.ga import GAnalytics, read_ga
from pandas.io.auth import AuthenticationConfigError
except ImportError:
raise nose.SkipTest

try:
reader = GAnalytics()

@@ -118,24 +97,10 @@ def test_iterator(self):

except AuthenticationConfigError:
raise nose.SkipTest
except httplib2.ServerNotFoundError:
try:
h = httplib2.Http()
response, content = h.request("http://www.google.com")
raise
except httplib2.ServerNotFoundError:
raise nose.SkipTest

@slow
@network
@with_connectivity_check("http://www.google.com")
def test_segment(self):
try:
import httplib2
from pandas.io.ga import GAnalytics, read_ga
from pandas.io.auth import AuthenticationConfigError
except ImportError:
raise nose.SkipTest

try:
end_date = datetime.now()
start_date = end_date - pd.offsets.Day() * 5
@@ -186,16 +151,7 @@ def test_segment(self):

except AuthenticationConfigError:
raise nose.SkipTest
except httplib2.ServerNotFoundError:
try:
h = httplib2.Http()
response, content = h.request("http://www.google.com")
raise
except httplib2.ServerNotFoundError:
raise nose.SkipTest


if __name__ == '__main__':
import nose
nose.runmodule(argv=[__file__, '-vvs', '-x', '--pdb', '--pdb-failure'],
exit=False)
131 changes: 40 additions & 91 deletions pandas/io/tests/test_google.py
@@ -2,122 +2,71 @@
import nose
from datetime import datetime

import numpy as np
import pandas as pd
import pandas.io.data as web
from pandas.util.testing import (network, assert_series_equal)
from numpy.testing.decorators import slow
import numpy as np

import urllib2
from pandas.util.testing import network, with_connectivity_check


class TestGoogle(unittest.TestCase):

@network
@with_connectivity_check("http://www.google.com")
def test_google(self):
# asserts that google is minimally working and that it throws
# an excecption when DataReader can't get a 200 response from
# an exception when DataReader can't get a 200 response from
# google
start = datetime(2010, 1, 1)
end = datetime(2013, 01, 27)

try:
self.assertEquals(
web.DataReader("F", 'google', start, end)['Close'][-1],
13.68)
except urllib2.URLError:
try:
urllib2.urlopen('http://www.google.com')
except urllib2.URLError:
raise nose.SkipTest
else:
raise

@network
def test_google_non_existent(self):
# asserts that google is minimally working and that it throws
# an excecption when DataReader can't get a 200 response from
# google
start = datetime(2010, 1, 1)
end = datetime(2013, 01, 27)
self.assertEquals(
web.DataReader("F", 'google', start, end)['Close'][-1],
13.68)

try:
self.assertRaises(
Exception,
lambda: web.DataReader("NON EXISTENT TICKER", 'google',
start, end))
except urllib2.URLError:
try:
urllib2.urlopen('http://www.google.com')
except urllib2.URLError:
raise nose.SkipTest
else:
raise
self.assertRaises(
Exception,
lambda: web.DataReader("NON EXISTENT TICKER", 'google',
start, end))


@network
def test_get_quote(self):
self.assertRaises(NotImplementedError,
lambda: web.get_quote_google(pd.Series(['GOOG', 'AAPL', 'GOOG'])))

@network
@with_connectivity_check('http://www.google.com')
def test_get_goog_volume(self):
try:
df = web.get_data_google('GOOG')
assert df.Volume.ix['OCT-08-2010'] == 2863473
except IOError:
try:
urllib2.urlopen('http://www.google.com')
except IOError:
raise nose.SkipTest
else:
raise
df = web.get_data_google('GOOG')
assert df.Volume.ix['OCT-08-2010'] == 2863473

@network
@with_connectivity_check('http://www.google.com')
def test_get_multi1(self):
try:
sl = ['AAPL', 'AMZN', 'GOOG']
pan = web.get_data_google(sl, '2012')
ts = pan.Close.GOOG.index[pan.Close.AAPL > pan.Close.GOOG]
assert ts[0].dayofyear == 96
except IOError:
try:
urllib2.urlopen('http://www.google.com')
except IOError:
raise nose.SkipTest
else:
raise
sl = ['AAPL', 'AMZN', 'GOOG']
pan = web.get_data_google(sl, '2012')
ts = pan.Close.GOOG.index[pan.Close.AAPL > pan.Close.GOOG]
assert ts[0].dayofyear == 96

@network
@with_connectivity_check('http://www.google.com')
def test_get_multi2(self):
try:
pan = web.get_data_google(['GE', 'MSFT', 'INTC'], 'JAN-01-12', 'JAN-31-12')
expected = [19.02, 28.23, 25.39]
result = pan.Close.ix['01-18-12'][['GE', 'MSFT', 'INTC']].tolist()
assert result == expected

# sanity checking
t= np.array(result)
assert np.issubdtype(t.dtype, np.floating)
assert t.shape == (3,)

expected = [[ 18.99, 28.4 , 25.18],
[ 18.58, 28.31, 25.13],
[ 19.03, 28.16, 25.52],
[ 18.81, 28.82, 25.87]]
result = pan.Open.ix['Jan-15-12':'Jan-20-12'][['GE', 'MSFT', 'INTC']].values
assert (result == expected).all()

# sanity checking
t= np.array(pan)
assert np.issubdtype(t.dtype, np.floating)
except IOError:
try:
urllib2.urlopen('http://www.google.com')
except IOError:
raise nose.SkipTest
else:
raise
pan = web.get_data_google(['GE', 'MSFT', 'INTC'], 'JAN-01-12', 'JAN-31-12')
expected = [19.02, 28.23, 25.39]
result = pan.Close.ix['01-18-12'][['GE', 'MSFT', 'INTC']].tolist()
assert result == expected

# sanity checking
t= np.array(result)
assert np.issubdtype(t.dtype, np.floating)
assert t.shape == (3,)

expected = [[ 18.99, 28.4 , 25.18],
[ 18.58, 28.31, 25.13],
[ 19.03, 28.16, 25.52],
[ 18.81, 28.82, 25.87]]
result = pan.Open.ix['Jan-15-12':'Jan-20-12'][['GE', 'MSFT', 'INTC']].values
assert (result == expected).all()

# sanity checking
t= np.array(pan)
assert np.issubdtype(t.dtype, np.floating)

if __name__ == '__main__':
nose.runmodule(argv=[__file__, '-vvs', '-x', '--pdb', '--pdb-failure'],
Diffs for the remaining changed files are not shown here.