DEPS: set min versions
closes pandas-dev#15206, numpy >= 1.9
closes pandas-dev#15543, matplotlib >= 1.4.3
scipy >= 0.14.0
jreback committed Aug 18, 2017
1 parent 0ee1675 commit 67b6825
Showing 24 changed files with 86 additions and 183 deletions.
2 changes: 1 addition & 1 deletion ci/requirements-2.7_COMPAT.build
@@ -1,5 +1,5 @@
python=2.7*
numpy=1.7.1
numpy=1.9.2
cython=0.23
dateutil=1.5
pytz=2013b
9 changes: 5 additions & 4 deletions ci/requirements-2.7_COMPAT.run
@@ -1,11 +1,12 @@
numpy=1.7.1
numpy=1.9.2
dateutil=1.5
pytz=2013b
scipy=0.11.0
scipy=0.14.0
xlwt=0.7.5
xlrd=0.9.2
numexpr=2.2.2
pytables=3.0.0
bottleneck=1.0.0
numexpr=2.4.4 # this is actually unsupported for non-pytables
pytables=3.2.2
psycopg2
pymysql=0.6.0
sqlalchemy=0.7.8
2 changes: 1 addition & 1 deletion ci/requirements-2.7_LOCALE.build
@@ -1,5 +1,5 @@
python=2.7*
python-dateutil
pytz=2013b
numpy=1.8.2
numpy=1.9.2
cython=0.23
5 changes: 3 additions & 2 deletions ci/requirements-2.7_LOCALE.run
@@ -1,11 +1,12 @@
python-dateutil
pytz=2013b
numpy=1.8.2
numpy=1.9.2
xlwt=0.7.5
openpyxl=1.6.2
xlsxwriter=0.5.2
xlrd=0.9.2
matplotlib=1.3.1
bottleneck=1.0.0
matplotlib=1.4.3
sqlalchemy=0.8.1
lxml=3.2.1
scipy
2 changes: 1 addition & 1 deletion ci/requirements-2.7_SLOW.build
@@ -1,5 +1,5 @@
python=2.7*
python-dateutil
pytz
numpy=1.8.2
numpy=1.10*
cython
4 changes: 2 additions & 2 deletions ci/requirements-2.7_SLOW.run
@@ -1,7 +1,7 @@
python-dateutil
pytz
numpy=1.8.2
matplotlib=1.3.1
numpy=1.10*
matplotlib=1.4.3
scipy
patsy
xlwt
6 changes: 3 additions & 3 deletions doc/source/install.rst
Expand Up @@ -203,7 +203,7 @@ Dependencies
------------

* `setuptools <https://setuptools.readthedocs.io/en/latest/>`__
* `NumPy <http://www.numpy.org>`__: 1.7.1 or higher
* `NumPy <http://www.numpy.org>`__: 1.9.0 or higher
* `python-dateutil <http://labix.org/python-dateutil>`__: 1.5 or higher
* `pytz <http://pytz.sourceforge.net/>`__: Needed for time zone support

@@ -233,7 +233,7 @@ Optional Dependencies

* `Cython <http://www.cython.org>`__: Only necessary to build development
version. Version 0.23 or higher.
* `SciPy <http://www.scipy.org>`__: miscellaneous statistical functions
* `SciPy <http://www.scipy.org>`__: miscellaneous statistical functions, Version 0.14.0 or higher
* `xarray <http://xarray.pydata.org>`__: pandas like handling for > 2 dims, needed for converting Panels to xarray objects. Version 0.7.0 or higher is recommended.
* `PyTables <http://www.pytables.org>`__: necessary for HDF5-based storage. Version 3.0.0 or higher required, Version 3.2.1 or higher highly recommended.
* `Feather Format <https://github.com/wesm/feather>`__: necessary for feather-based storage, version 0.3.1 or higher.
@@ -244,7 +244,7 @@ Optional Dependencies
* `pymysql <https://github.com/PyMySQL/PyMySQL>`__: for MySQL.
* `SQLite <https://docs.python.org/3.5/library/sqlite3.html>`__: for SQLite, this is included in Python's standard library by default.

* `matplotlib <http://matplotlib.org/>`__: for plotting
* `matplotlib <http://matplotlib.org/>`__: for plotting, Version 1.4.3 or higher.
* For Excel I/O:

* `xlrd/xlwt <http://www.python-excel.org/>`__: Excel reading (xlrd) and writing (xlwt)
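As a quick way to compare an existing environment against the minimums documented above, pandas ships a diagnostic helper; this is just a usage reminder, not part of the diff.

```python
import pandas as pd

# Prints the versions of pandas and of its required/optional dependencies
# (NumPy, dateutil, pytz, matplotlib, scipy, ...), which makes it easy to
# check an environment against the documented minimums.
pd.show_versions()
```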
20 changes: 20 additions & 0 deletions doc/source/whatsnew/v0.21.0.txt
@@ -133,6 +133,26 @@ Other Enhancements
Backwards incompatible API changes
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~


.. _whatsnew_0210.api_breaking.deps:

Dependencies have increased minimum versions
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

We have updated our minimum supported versions of dependencies (:issue:`15206`, :issue:`15543`, :issue:`15214`). We now require:

+--------------+-----------------+
| Package      | Minimum Version |
+==============+=================+
| Numpy        | 1.9.0           |
+--------------+-----------------+
| Matplotlib   | 1.4.3           |
+--------------+-----------------+
| Scipy        | 0.14.0          |
+--------------+-----------------+
| Bottleneck   | 1.0.0           |
+--------------+-----------------+

.. _whatsnew_0210.api_breaking.pandas_eval:

Improved error handling during item assignment in pd.eval
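A minimal sketch (not part of the commit) of how the new floors in the whatsnew table above could be verified programmatically; the MIN_VERSIONS mapping below is simply an inline restatement of that table.

```python
from distutils.version import LooseVersion

# Floors taken from the table in the whatsnew entry above.
MIN_VERSIONS = {"numpy": "1.9.0", "matplotlib": "1.4.3",
                "scipy": "0.14.0", "bottleneck": "1.0.0"}

for name, min_ver in MIN_VERSIONS.items():
    try:
        mod = __import__(name)
    except ImportError:
        continue  # optional dependencies may legitimately be absent
    if LooseVersion(mod.__version__) < LooseVersion(min_ver):
        print("%s %s is older than the required %s"
              % (name, mod.__version__, min_ver))
```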
2 changes: 0 additions & 2 deletions pandas/_libs/sparse.pyx
@@ -12,8 +12,6 @@ from distutils.version import LooseVersion

# numpy versioning
_np_version = np.version.short_version
_np_version_under1p8 = LooseVersion(_np_version) < '1.8'
_np_version_under1p9 = LooseVersion(_np_version) < '1.9'
_np_version_under1p10 = LooseVersion(_np_version) < '1.10'
_np_version_under1p11 = LooseVersion(_np_version) < '1.11'

14 changes: 6 additions & 8 deletions pandas/compat/numpy/__init__.py
@@ -9,19 +9,18 @@
# numpy versioning
_np_version = np.__version__
_nlv = LooseVersion(_np_version)
_np_version_under1p8 = _nlv < '1.8'
_np_version_under1p9 = _nlv < '1.9'
_np_version_under1p10 = _nlv < '1.10'
_np_version_under1p11 = _nlv < '1.11'
_np_version_under1p12 = _nlv < '1.12'
_np_version_under1p13 = _nlv < '1.13'
_np_version_under1p14 = _nlv < '1.14'
_np_version_under1p15 = _nlv < '1.15'

if _nlv < '1.7.0':
if _nlv < '1.9':
raise ImportError('this version of pandas is incompatible with '
'numpy < 1.7.0\n'
'numpy < 1.9.0\n'
'your numpy version is {0}.\n'
'Please upgrade numpy to >= 1.7.0 to use '
'Please upgrade numpy to >= 1.9.0 to use '
'this pandas version'.format(_np_version))


@@ -70,11 +69,10 @@ def np_array_datetime64_compat(arr, *args, **kwargs):


__all__ = ['np',
'_np_version_under1p8',
'_np_version_under1p9',
'_np_version_under1p10',
'_np_version_under1p11',
'_np_version_under1p12',
'_np_version_under1p13',
'_np_version_under1p14'
'_np_version_under1p14',
'_np_version_under1p15'
]
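For background on why these compat shims compare versions via LooseVersion rather than plain strings, a small illustrative check (assumes nothing beyond the standard library, and is not part of the diff):

```python
from distutils.version import LooseVersion

# Lexicographic string comparison would order '1.10' before '1.9',
# which is exactly the mistake LooseVersion avoids.
assert '1.10' < '1.9'                                    # wrong answer, plain string compare
assert not (LooseVersion('1.10') < LooseVersion('1.9'))  # correct, version-aware compare
assert LooseVersion('1.8.2') < LooseVersion('1.9')       # an older numpy trips the import guard
```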
7 changes: 2 additions & 5 deletions pandas/core/algorithms.py
@@ -6,7 +6,6 @@
from warnings import warn, catch_warnings
import numpy as np

from pandas import compat, _np_version_under1p8
from pandas.core.dtypes.cast import maybe_promote
from pandas.core.dtypes.generic import (
ABCSeries, ABCIndex,
@@ -407,14 +406,12 @@ def isin(comps, values):
comps, dtype, _ = _ensure_data(comps)
values, _, _ = _ensure_data(values, dtype=dtype)

# GH11232
# work-around for numpy < 1.8 and comparisions on py3
# faster for larger cases to use np.in1d
f = lambda x, y: htable.ismember_object(x, values)

# GH16012
# Ensure np.in1d doesn't get object types or it *may* throw an exception
if ((_np_version_under1p8 and compat.PY3) or len(comps) > 1000000 and
not is_object_dtype(comps)):
if len(comps) > 1000000 and not is_object_dtype(comps):
f = lambda x, y: np.in1d(x, y)
elif is_integer_dtype(comps):
try:
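The change above relies on np.in1d and the hashtable path producing the same boolean mask, with the roughly 1,000,000-element cutoff only choosing the faster route. A small sketch of that equivalence (array sizes and values are made up for illustration):

```python
import numpy as np
import pandas as pd

comps = pd.Series(np.random.randint(0, 10, size=2000000))
values = [1, 2, 3]

# Above the size cutoff, and for non-object dtypes, pandas takes the
# np.in1d route; either way the resulting mask is identical.
mask = comps.isin(values)
assert mask.equals(pd.Series(np.in1d(comps.values, values)))
```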
5 changes: 1 addition & 4 deletions pandas/core/generic.py
@@ -1827,11 +1827,8 @@ def _box_item_values(self, key, values):

def _maybe_cache_changed(self, item, value):
"""The object has called back to us saying maybe it has changed.
numpy < 1.8 has an issue with object arrays and aliasing
GH6026
"""
self._data.set(item, value, check=pd._np_version_under1p8)
self._data.set(item, value, check=False)

@property
def _is_cached(self):
8 changes: 2 additions & 6 deletions pandas/core/groupby.py
@@ -13,7 +13,7 @@
)

from pandas import compat
from pandas.compat.numpy import function as nv, _np_version_under1p8
from pandas.compat.numpy import function as nv
from pandas.compat import set_function_name

from pandas.core.dtypes.common import (
@@ -3257,11 +3257,7 @@ def value_counts(self, normalize=False, sort=True, ascending=False,
d = np.diff(np.r_[idx, len(ids)])
if dropna:
m = ids[lab == -1]
if _np_version_under1p8:
mi, ml = algorithms.factorize(m)
d[ml] = d[ml] - np.bincount(mi)
else:
np.add.at(d, m, -1)
np.add.at(d, m, -1)
acc = rep(d)[mask]
else:
acc = rep(d)
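np.add.at, available in every numpy version once the floor is 1.9, performs unbuffered in-place addition, so repeated indices each take effect; that is the behaviour the removed factorize/bincount fallback emulated. A tiny illustration, not taken from the diff:

```python
import numpy as np

d = np.array([3, 3, 3])
m = np.array([0, 0, 2])

# Unbuffered: index 0 is decremented twice, index 2 once.
np.add.at(d, m, -1)
print(d)  # -> [1 3 2]
```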
16 changes: 2 additions & 14 deletions pandas/core/internals.py
@@ -69,8 +69,7 @@
import pandas.core.computation.expressions as expressions
from pandas.util._decorators import cache_readonly
from pandas.util._validators import validate_bool_kwarg

from pandas import compat, _np_version_under1p9
from pandas import compat
from pandas.compat import range, map, zip, u


@@ -857,9 +856,6 @@ def _is_empty_indexer(indexer):

# set
else:
if _np_version_under1p9:
# Work around GH 6168 to support old numpy
indexer = getattr(indexer, 'values', indexer)
values[indexer] = value

# coerce and try to infer the dtypes of the result
@@ -1482,15 +1478,7 @@ def quantile(self, qs, interpolation='linear', axis=0, mgr=None):
tuple of (axis, block)
"""
if _np_version_under1p9:
if interpolation != 'linear':
raise ValueError("Interpolation methods other than linear "
"are not supported in numpy < 1.9.")

kw = {}
if not _np_version_under1p9:
kw.update({'interpolation': interpolation})

kw = {'interpolation': interpolation}
values = self.get_values()
values, _, _, _ = self._try_coerce_args(values, values)

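The simplification above works because np.percentile accepts the interpolation keyword from numpy 1.9 onwards, so the version branch and the linear-only restriction are no longer needed. A short illustration, not part of the diff:

```python
import numpy as np

values = np.array([1, 2, 3, 4])

print(np.percentile(values, 40))                           # 2.2 with the default 'linear'
print(np.percentile(values, 40, interpolation='lower'))    # 2
print(np.percentile(values, 40, interpolation='nearest'))  # 2
```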
42 changes: 0 additions & 42 deletions pandas/tests/frame/test_quantile.py
@@ -12,7 +12,6 @@
from pandas.util.testing import assert_series_equal, assert_frame_equal

import pandas.util.testing as tm
from pandas import _np_version_under1p9

from pandas.tests.frame.common import TestData

@@ -103,9 +102,6 @@ def test_quantile_axis_parameter(self):

def test_quantile_interpolation(self):
# see gh-10174
if _np_version_under1p9:
pytest.skip("Numpy version under 1.9")

from numpy import percentile

# interpolation = linear (default case)
@@ -166,44 +162,6 @@ def test_quantile_interpolation(self):
index=[.25, .5], columns=['a', 'b', 'c'])
assert_frame_equal(result, expected)

def test_quantile_interpolation_np_lt_1p9(self):
# see gh-10174
if not _np_version_under1p9:
pytest.skip("Numpy version is greater than 1.9")

from numpy import percentile

# interpolation = linear (default case)
q = self.tsframe.quantile(0.1, axis=0, interpolation='linear')
assert q['A'] == percentile(self.tsframe['A'], 10)
q = self.intframe.quantile(0.1)
assert q['A'] == percentile(self.intframe['A'], 10)

# test with and without interpolation keyword
q1 = self.intframe.quantile(0.1)
assert q1['A'] == np.percentile(self.intframe['A'], 10)
assert_series_equal(q, q1)

# interpolation method other than default linear
msg = "Interpolation methods other than linear"
df = DataFrame({"A": [1, 2, 3], "B": [2, 3, 4]}, index=[1, 2, 3])
with tm.assert_raises_regex(ValueError, msg):
df.quantile(.5, axis=1, interpolation='nearest')

with tm.assert_raises_regex(ValueError, msg):
df.quantile([.5, .75], axis=1, interpolation='lower')

# test degenerate case
df = DataFrame({'x': [], 'y': []})
with tm.assert_raises_regex(ValueError, msg):
q = df.quantile(0.1, axis=0, interpolation='higher')

# multi
df = DataFrame([[1, 1, 1], [2, 2, 2], [3, 3, 3]],
columns=['a', 'b', 'c'])
with tm.assert_raises_regex(ValueError, msg):
df.quantile([.25, .5], interpolation='midpoint')

def test_quantile_multi(self):
df = DataFrame([[1, 1, 1], [2, 2, 2], [3, 3, 3]],
columns=['a', 'b', 'c'])
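With the numpy floor at 1.9, every interpolation method is supported, so the ValueError paths the deleted test exercised can no longer be reached. A small usage sketch of the surviving behaviour (not taken from the test file):

```python
import pandas as pd

df = pd.DataFrame({"A": [1, 2, 3], "B": [2, 3, 4]})

# All interpolation methods are accepted now that numpy >= 1.9 is guaranteed.
print(df.quantile(0.5, interpolation='nearest'))
print(df.quantile([0.25, 0.5], interpolation='midpoint'))
```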
8 changes: 2 additions & 6 deletions pandas/tests/indexes/datetimes/test_datetime.py
@@ -9,7 +9,7 @@
from pandas.compat import lrange
from pandas.compat.numpy import np_datetime64_compat
from pandas import (DatetimeIndex, Index, date_range, Series, DataFrame,
Timestamp, datetime, offsets, _np_version_under1p8)
Timestamp, datetime, offsets)

from pandas.util.testing import assert_series_equal, assert_almost_equal

@@ -276,11 +276,7 @@ def test_comparisons_nat(self):
np_datetime64_compat('2014-06-01 00:00Z'),
np_datetime64_compat('2014-07-01 00:00Z')])

if _np_version_under1p8:
# cannot test array because np.datetime('nat') returns today's date
cases = [(fidx1, fidx2), (didx1, didx2)]
else:
cases = [(fidx1, fidx2), (didx1, didx2), (didx1, darr)]
cases = [(fidx1, fidx2), (didx1, didx2), (didx1, darr)]

# Check pd.NaT is handles as the same as np.nan
with tm.assert_produces_warning(None):