Skip to content

Commit

Permalink
Merge remote-tracking branch 'upstream/master' into load_nonindex_coords
Browse files Browse the repository at this point in the history
  • Loading branch information
gimperiale committed Oct 6, 2017
2 parents 1c30474 + 5bd4015 commit 72cf0e7
Show file tree
Hide file tree
Showing 15 changed files with 148 additions and 164 deletions.
4 changes: 4 additions & 0 deletions .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,8 @@ matrix:
env: CONDA_ENV=py36-bottleneck-dev
- python: 3.6
env: CONDA_ENV=py36-condaforge-rc
- python: 3.6
env: CONDA_ENV=py36-pynio-dev
allow_failures:
- python: 3.6
env:
Expand All @@ -59,6 +61,8 @@ matrix:
env: CONDA_ENV=py36-bottleneck-dev
- python: 3.6
env: CONDA_ENV=py36-condaforge-rc
- python: 3.6
env: CONDA_ENV=py36-pynio-dev

before_install:
- if [[ "$TRAVIS_PYTHON_VERSION" == "2.7" ]]; then
Expand Down
25 changes: 25 additions & 0 deletions ci/requirements-py36-pynio-dev.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
name: test_env
channels:
- conda-forge
- ncar
dependencies:
- python=3.6
- dask
- distributed
- h5py
- h5netcdf
- matplotlib
- netcdf4
- pynio=dev
- pytest
- numpy
- pandas
- scipy
- seaborn
- toolz
- rasterio
- bottleneck
- pip:
- coveralls
- pytest-cov
- pydap
16 changes: 16 additions & 0 deletions doc/whats-new.rst
Original file line number Diff line number Diff line change
Expand Up @@ -203,6 +203,7 @@ Bug fixes
- :py:meth:`~xarray.Dataset.__init__` raises a ``MergeError`` if a
  coordinate shares a name with a dimension but is comprised of arbitrary
  dimensions (:issue:`1120`).

- :py:func:`~xarray.open_rasterio` now skips the ``rasterio.crs`` attribute if
  it is ``None``.
By `Leevi Annala <https://github.com/leevei>`_.
Expand All @@ -215,6 +216,21 @@ Bug fixes
when objects other than ``Dataset`` are provided (:issue:`1555`).
By `Joe Hamman <https://github.com/jhamman>`_.

- :py:func:`xarray.concat` would eagerly load dask variables into memory if
the first argument was a numpy variable (:issue:`1588`).
By `Guido Imperiale <https://github.com/crusaderky>`_.

- Fix ``netCDF4`` backend to properly roundtrip the ``shuffle`` encoding option
(:issue:`1606`).
By `Joe Hamman <https://github.com/jhamman>`_.

- Fix bug when using ``pytest`` class decorators to skip certain unittests.
  The previous behavior unintentionally caused additional tests to be skipped
  (:issue:`1531`). By `Joe Hamman <https://github.com/jhamman>`_.

- Fix pynio backend for upcoming release of pynio with python3 support
  (:issue:`1611`). By `Ben Hillman <https://github.com/brhillman>`_.

.. _whats-new.0.9.6:

v0.9.6 (8 June 2017)
Expand Down
2 changes: 1 addition & 1 deletion xarray/backends/netCDF4_.py
Original file line number Diff line number Diff line change
Expand Up @@ -153,7 +153,7 @@ def _extract_nc4_variable_encoding(variable, raise_on_invalid=False,

safe_to_drop = set(['source', 'original_shape'])
valid_encodings = set(['zlib', 'complevel', 'fletcher32', 'contiguous',
'chunksizes'])
'chunksizes', 'shuffle'])
if lsd_okay:
valid_encodings.add('least_significant_digit')

Expand Down
2 changes: 1 addition & 1 deletion xarray/backends/pynio_.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,7 @@ def open_store_variable(self, name, var):
def get_variables(self):
with self.ensure_open(autoclose=False):
return FrozenOrderedDict((k, self.open_store_variable(k, v))
for k, v in self.ds.variables.iteritems())
for k, v in self.ds.variables.items())

def get_attrs(self):
with self.ensure_open(autoclose=True):
Expand Down
14 changes: 8 additions & 6 deletions xarray/conventions.py
Original file line number Diff line number Diff line change
Expand Up @@ -858,12 +858,14 @@ def decode_cf_variable(var, concat_characters=True, mask_and_scale=True,
if ('_FillValue' in attributes and
not utils.equivalent(attributes['_FillValue'],
attributes['missing_value'])):
raise ValueError("Discovered conflicting _FillValue "
"and missing_value. Considering "
"opening the offending dataset using "
"decode_cf=False, corrected the attributes",
"and decoding explicitly using "
"xarray.conventions.decode_cf(ds)")
raise ValueError("Conflicting _FillValue and missing_value "
"attributes on a variable: {} vs. {}\n\n"
"Consider opening the offending dataset "
"using decode_cf=False, correcting the "
"attributes and decoding explicitly using "
"xarray.decode_cf()."
.format(attributes['_FillValue'],
attributes['missing_value']))
attributes['_FillValue'] = attributes.pop('missing_value')
fill_value = np.array(pop_to(attributes, encoding, '_FillValue'))
if fill_value.size > 1:
Expand Down
8 changes: 5 additions & 3 deletions xarray/core/duck_array_ops.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,9 +40,11 @@ def _dask_or_eager_func(name, eager_module=np, list_of_args=False,
"""Create a function that dispatches to dask for dask array inputs."""
if has_dask:
def f(*args, **kwargs):
dispatch_args = args[0] if list_of_args else args
if any(isinstance(a, da.Array)
for a in dispatch_args[:n_array_args]):
if list_of_args:
dispatch_args = args[0]
else:
dispatch_args = args[:n_array_args]
if any(isinstance(a, da.Array) for a in dispatch_args):
module = da
else:
module = eager_module
Expand Down
2 changes: 1 addition & 1 deletion xarray/core/groupby.py
Original file line number Diff line number Diff line change
Expand Up @@ -236,8 +236,8 @@ def __init__(self, obj, group, squeeze=False, grouper=None, bins=None,
raise ValueError('index must be monotonic for resampling')
s = pd.Series(np.arange(index.size), index)
first_items = s.groupby(grouper).first()
full_index = first_items.index
if first_items.isnull().any():
full_index = first_items.index
first_items = first_items.dropna()
sbins = first_items.values.astype(np.int64)
group_indices = ([slice(i, j)
Expand Down
136 changes: 38 additions & 98 deletions xarray/tests/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
from contextlib import contextmanager
from distutils.version import LooseVersion
import re
import importlib

import numpy as np
from numpy.testing import assert_array_equal
Expand All @@ -25,111 +26,50 @@
except ImportError:
import mock

try:
import scipy
has_scipy = True
except ImportError:
has_scipy = False

try:
import pydap.client
has_pydap = True
except ImportError:
has_pydap = False

try:
import netCDF4
has_netCDF4 = True
except ImportError:
has_netCDF4 = False


try:
import h5netcdf
has_h5netcdf = True
except ImportError:
has_h5netcdf = False


try:
import Nio
has_pynio = True
except ImportError:
has_pynio = False


try:
import dask.array
import dask
dask.set_options(get=dask.get)
has_dask = True
except ImportError:
has_dask = False


try:
import matplotlib
has_matplotlib = True
except ImportError:
has_matplotlib = False


try:
import bottleneck
if LooseVersion(bottleneck.__version__) < LooseVersion('1.1'):
raise ImportError('Fall back to numpy')
has_bottleneck = True
except ImportError:
has_bottleneck = False

try:
import rasterio
has_rasterio = True
except ImportError:
has_rasterio = False

try:
import pathlib
has_pathlib = True
except ImportError:
def _importorskip(modname, minversion=None):
try:
import pathlib2
has_pathlib = True
mod = importlib.import_module(modname)
has = True
if minversion is not None:
if LooseVersion(mod.__version__) < LooseVersion(minversion):
raise ImportError('Minimum version not satisfied')
except ImportError:
has_pathlib = False


# slightly simpler construction than the full functions.
# Generally `pytest.importorskip('package')` inline is even easier
requires_matplotlib = pytest.mark.skipif(
not has_matplotlib, reason='requires matplotlib')
requires_scipy = pytest.mark.skipif(
not has_scipy, reason='requires scipy')
requires_pydap = pytest.mark.skipif(
not has_pydap, reason='requires pydap')
requires_netCDF4 = pytest.mark.skipif(
not has_netCDF4, reason='requires netCDF4')
requires_h5netcdf = pytest.mark.skipif(
not has_h5netcdf, reason='requires h5netcdf')
requires_pynio = pytest.mark.skipif(
not has_pynio, reason='requires pynio')
requires_scipy_or_netCDF4 = pytest.mark.skipif(
not has_scipy and not has_netCDF4, reason='requires scipy or netCDF4')
requires_dask = pytest.mark.skipif(
not has_dask, reason='requires dask')
requires_bottleneck = pytest.mark.skipif(
not has_bottleneck, reason='requires bottleneck')
requires_rasterio = pytest.mark.skipif(
not has_rasterio, reason='requires rasterio')
requires_pathlib = pytest.mark.skipif(
not has_pathlib, reason='requires pathlib / pathlib2'
)

has = False
# TODO: use pytest.skipif instead of unittest.skipUnless
# Using `unittest.skipUnless` is a temporary workaround for pytest#568,
# wherein class decorators stain inherited classes.
# xref: xarray#1531, implemented in xarray #1557.
func = unittest.skipUnless(has, reason='requires {}'.format(modname))
return has, func


has_matplotlib, requires_matplotlib = _importorskip('matplotlib')
has_scipy, requires_scipy = _importorskip('scipy')
has_pydap, requires_pydap = _importorskip('pydap.client')
has_netCDF4, requires_netCDF4 = _importorskip('netCDF4')
has_h5netcdf, requires_h5netcdf = _importorskip('h5netcdf')
has_pynio, requires_pynio = _importorskip('Nio')
has_dask, requires_dask = _importorskip('dask')
has_bottleneck, requires_bottleneck = _importorskip('bottleneck')
has_rasterio, requires_rasterio = _importorskip('rasterio')
has_pathlib, requires_pathlib = _importorskip('pathlib')

# some special cases
has_scipy_or_netCDF4 = has_scipy or has_netCDF4
requires_scipy_or_netCDF4 = unittest.skipUnless(
has_scipy_or_netCDF4, reason='requires scipy or netCDF4')
if not has_pathlib:
has_pathlib, requires_pathlib = _importorskip('pathlib2')

if has_dask:
import dask
dask.set_options(get=dask.get)

try:
_SKIP_FLAKY = not pytest.config.getoption("--run-flaky")
_SKIP_NETWORK_TESTS = not pytest.config.getoption("--run-network-tests")
except ValueError:
except (ValueError, AttributeError):
# Can't get config from pytest, e.g., because xarray is installed instead
# of being run from a development version (and hence conftests.py is not
# available). Don't run flaky tests.
Expand Down
8 changes: 7 additions & 1 deletion xarray/tests/test_backends.py
Original file line number Diff line number Diff line change
Expand Up @@ -729,6 +729,7 @@ def test_compression_encoding(self):
data['var2'].encoding.update({'zlib': True,
'chunksizes': (5, 5),
'fletcher32': True,
'shuffle': True,
'original_shape': data.var2.shape})
with self.roundtrip(data) as actual:
for k, v in iteritems(data['var2'].encoding):
Expand Down Expand Up @@ -1179,7 +1180,7 @@ def test_encoding_unlimited_dims(self):


# tests pending h5netcdf fix
@pytest.mark.xfail
@unittest.skip
class H5NetCDFDataTestAutocloseTrue(H5NetCDFDataTest):
autoclose = True

Expand Down Expand Up @@ -1846,6 +1847,11 @@ def test_extract_nc4_variable_encoding(self):
encoding = _extract_nc4_variable_encoding(var)
self.assertEqual({}, encoding)

# regression test
var = xr.Variable(('x',), [1, 2, 3], {}, {'shuffle': True})
encoding = _extract_nc4_variable_encoding(var, raise_on_invalid=True)
self.assertEqual({'shuffle': True}, encoding)

def test_extract_h5nc_encoding(self):
# not supported with h5netcdf (yet)
var = xr.Variable(('x',), [1, 2, 3], {},
Expand Down
6 changes: 2 additions & 4 deletions xarray/tests/test_conventions.py
Original file line number Diff line number Diff line change
Expand Up @@ -451,11 +451,9 @@ def test_cf_timedelta(self):
('1us', 'microseconds', np.int64(1)),
(['NaT', '0s', '1s'], None, [np.nan, 0, 1]),
(['30m', '60m'], 'hours', [0.5, 1.0]),
(np.timedelta64('NaT', 'ns'), 'days', np.nan),
(['NaT', 'NaT'], 'days', [np.nan, np.nan]),
]
if pd.__version__ >= '0.16':
# not quite sure why, but these examples don't work on older pandas
examples.extend([(np.timedelta64('NaT', 'ns'), 'days', np.nan),
(['NaT', 'NaT'], 'days', [np.nan, np.nan])])

for timedeltas, units, numbers in examples:
timedeltas = pd.to_timedelta(timedeltas, box=False)
Expand Down
Loading

0 comments on commit 72cf0e7

Please sign in to comment.