From 350b0b43b18a2de23511a626ed091d54d6a42632 Mon Sep 17 00:00:00 2001 From: eschalk Date: Sun, 18 Feb 2024 17:29:27 +0100 Subject: [PATCH] Format with preview flag --- asv_bench/benchmarks/combine.py | 30 +- asv_bench/benchmarks/dataset.py | 10 +- asv_bench/benchmarks/groupby.py | 12 +- asv_bench/benchmarks/pandas.py | 12 +- ci/min_deps_check.py | 23 +- doc/conf.py | 8 +- doc/examples/apply_ufunc_vectorize_1d.ipynb | 14 +- pyproject.toml | 1 + xarray/backends/api.py | 63 +-- xarray/backends/locks.py | 6 +- xarray/backends/plugins.py | 6 +- xarray/backends/pydap_.py | 10 +- xarray/backends/zarr.py | 6 +- xarray/coding/cftimeindex.py | 6 +- xarray/coding/times.py | 27 +- xarray/convert.py | 3 +- xarray/core/_typed_ops.py | 108 ++-- xarray/core/accessor_dt.py | 10 +- xarray/core/alignment.py | 36 +- xarray/core/common.py | 51 +- xarray/core/computation.py | 27 +- xarray/core/concat.py | 6 +- xarray/core/coordinates.py | 18 +- xarray/core/dataarray.py | 68 +-- xarray/core/dataset.py | 144 +++--- xarray/core/extensions.py | 7 +- xarray/core/formatting.py | 21 +- xarray/core/indexes.py | 12 +- xarray/core/merge.py | 30 +- xarray/core/nputils.py | 6 +- xarray/core/parallel.py | 13 +- xarray/core/rolling.py | 6 +- xarray/core/types.py | 24 +- xarray/core/utils.py | 34 +- xarray/namedarray/_typing.py | 69 +-- xarray/namedarray/core.py | 24 +- xarray/plot/accessor.py | 99 ++-- xarray/plot/dataarray_plot.py | 78 +-- xarray/plot/dataset_plot.py | 27 +- xarray/plot/utils.py | 18 +- xarray/testing/strategies.py | 9 +- xarray/tests/test_accessor_dt.py | 72 ++- xarray/tests/test_accessor_str.py | 119 +++-- xarray/tests/test_backends.py | 290 +++++------ xarray/tests/test_backends_api.py | 16 +- xarray/tests/test_cftimeindex.py | 72 ++- xarray/tests/test_coarsen.py | 14 +- xarray/tests/test_combine.py | 81 +-- xarray/tests/test_computation.py | 12 +- xarray/tests/test_concat.py | 54 +- xarray/tests/test_conventions.py | 187 ++++--- xarray/tests/test_dask.py | 28 +- xarray/tests/test_dataarray.py | 183 ++++--- xarray/tests/test_dataset.py | 537 +++++++++----------- xarray/tests/test_distributed.py | 2 +- xarray/tests/test_duck_array_ops.py | 68 ++- xarray/tests/test_formatting.py | 10 +- xarray/tests/test_groupby.py | 275 +++++----- xarray/tests/test_indexing.py | 23 +- xarray/tests/test_interp.py | 14 +- xarray/tests/test_merge.py | 21 +- xarray/tests/test_namedarray.py | 12 +- xarray/tests/test_plot.py | 46 +- xarray/tests/test_plugins.py | 14 +- xarray/tests/test_rolling.py | 43 +- xarray/tests/test_units.py | 111 ++-- xarray/util/generate_aggregations.py | 6 +- xarray/util/print_versions.py | 28 +- 68 files changed, 1571 insertions(+), 1949 deletions(-) diff --git a/asv_bench/benchmarks/combine.py b/asv_bench/benchmarks/combine.py index 772d888306c..66c7fca2426 100644 --- a/asv_bench/benchmarks/combine.py +++ b/asv_bench/benchmarks/combine.py @@ -16,9 +16,9 @@ def setup(self) -> None: data = np.random.randn(t_size) self.dsA0 = xr.Dataset({"A": xr.DataArray(data, coords={"T": t}, dims=("T"))}) - self.dsA1 = xr.Dataset( - {"A": xr.DataArray(data, coords={"T": t + t_size}, dims=("T"))} - ) + self.dsA1 = xr.Dataset({ + "A": xr.DataArray(data, coords={"T": t + t_size}, dims=("T")) + }) def time_combine_by_coords(self) -> None: """Also has to load and arrange t coordinate""" @@ -54,18 +54,18 @@ def setup(self): t = np.arange(t_size) data = np.random.randn(t_size, x_size, y_size) - self.dsA0 = xr.Dataset( - {"A": xr.DataArray(data, coords={"T": t}, dims=("T", "X", "Y"))} - ) - self.dsA1 = xr.Dataset( - {"A": 
xr.DataArray(data, coords={"T": t + t_size}, dims=("T", "X", "Y"))} - ) - self.dsB0 = xr.Dataset( - {"B": xr.DataArray(data, coords={"T": t}, dims=("T", "X", "Y"))} - ) - self.dsB1 = xr.Dataset( - {"B": xr.DataArray(data, coords={"T": t + t_size}, dims=("T", "X", "Y"))} - ) + self.dsA0 = xr.Dataset({ + "A": xr.DataArray(data, coords={"T": t}, dims=("T", "X", "Y")) + }) + self.dsA1 = xr.Dataset({ + "A": xr.DataArray(data, coords={"T": t + t_size}, dims=("T", "X", "Y")) + }) + self.dsB0 = xr.Dataset({ + "B": xr.DataArray(data, coords={"T": t}, dims=("T", "X", "Y")) + }) + self.dsB1 = xr.Dataset({ + "B": xr.DataArray(data, coords={"T": t + t_size}, dims=("T", "X", "Y")) + }) def time_combine_nested(self): datasets = [[self.dsA0, self.dsA1], [self.dsB0, self.dsB1]] diff --git a/asv_bench/benchmarks/dataset.py b/asv_bench/benchmarks/dataset.py index d8a6d6df9d8..fee73a450e8 100644 --- a/asv_bench/benchmarks/dataset.py +++ b/asv_bench/benchmarks/dataset.py @@ -7,12 +7,10 @@ class DatasetBinaryOp: def setup(self): - self.ds = Dataset( - { - "a": (("x", "y"), np.ones((300, 400))), - "b": (("x", "y"), np.ones((300, 400))), - } - ) + self.ds = Dataset({ + "a": (("x", "y"), np.ones((300, 400))), + "b": (("x", "y"), np.ones((300, 400))), + }) self.mean = self.ds.mean() self.std = self.ds.std() diff --git a/asv_bench/benchmarks/groupby.py b/asv_bench/benchmarks/groupby.py index 1b3e55fa659..a5cc083e90f 100644 --- a/asv_bench/benchmarks/groupby.py +++ b/asv_bench/benchmarks/groupby.py @@ -11,13 +11,11 @@ class GroupBy: def setup(self, *args, **kwargs): self.n = 100 - self.ds1d = xr.Dataset( - { - "a": xr.DataArray(np.r_[np.repeat(1, self.n), np.repeat(2, self.n)]), - "b": xr.DataArray(np.arange(2 * self.n)), - "c": xr.DataArray(np.arange(2 * self.n)), - } - ) + self.ds1d = xr.Dataset({ + "a": xr.DataArray(np.r_[np.repeat(1, self.n), np.repeat(2, self.n)]), + "b": xr.DataArray(np.arange(2 * self.n)), + "c": xr.DataArray(np.arange(2 * self.n)), + }) self.ds2d = self.ds1d.expand_dims(z=10).copy() self.ds1d_mean = self.ds1d.groupby("b").mean() self.ds2d_mean = self.ds2d.groupby("b").mean() diff --git a/asv_bench/benchmarks/pandas.py b/asv_bench/benchmarks/pandas.py index ebe61081916..9ae744b76e3 100644 --- a/asv_bench/benchmarks/pandas.py +++ b/asv_bench/benchmarks/pandas.py @@ -9,13 +9,11 @@ class MultiIndexSeries: def setup(self, dtype, subset): data = np.random.rand(100000).astype(dtype) - index = pd.MultiIndex.from_product( - [ - list("abcdefhijk"), - list("abcdefhijk"), - pd.date_range(start="2000-01-01", periods=1000, freq="D"), - ] - ) + index = pd.MultiIndex.from_product([ + list("abcdefhijk"), + list("abcdefhijk"), + pd.date_range(start="2000-01-01", periods=1000, freq="D"), + ]) series = pd.Series(data, index) if subset: series = series[::3] diff --git a/ci/min_deps_check.py b/ci/min_deps_check.py index 48ea323ed81..519dc1f928a 100755 --- a/ci/min_deps_check.py +++ b/ci/min_deps_check.py @@ -3,6 +3,7 @@ publication date. Compare it against requirements/min-all-deps.yml to verify the policy on obsolete dependencies is being followed. 
Print a pretty report :) """ + from __future__ import annotations import itertools @@ -104,18 +105,16 @@ def metadata(entry): # Hardcoded fix to work around incorrect dates in conda if pkg == "python": - out.update( - { - (2, 7): datetime(2010, 6, 3), - (3, 5): datetime(2015, 9, 13), - (3, 6): datetime(2016, 12, 23), - (3, 7): datetime(2018, 6, 27), - (3, 8): datetime(2019, 10, 14), - (3, 9): datetime(2020, 10, 5), - (3, 10): datetime(2021, 10, 4), - (3, 11): datetime(2022, 10, 24), - } - ) + out.update({ + (2, 7): datetime(2010, 6, 3), + (3, 5): datetime(2015, 9, 13), + (3, 6): datetime(2016, 12, 23), + (3, 7): datetime(2018, 6, 27), + (3, 8): datetime(2019, 10, 14), + (3, 9): datetime(2020, 10, 5), + (3, 10): datetime(2021, 10, 4), + (3, 11): datetime(2022, 10, 24), + }) return out diff --git a/doc/conf.py b/doc/conf.py index 4bbceddba3d..671f9df5670 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -52,11 +52,9 @@ try: import cartopy # noqa: F401 except ImportError: - allowed_failures.update( - [ - "gallery/plot_cartopy_facetgrid.py", - ] - ) + allowed_failures.update([ + "gallery/plot_cartopy_facetgrid.py", + ]) nbsphinx_allow_errors = False diff --git a/doc/examples/apply_ufunc_vectorize_1d.ipynb b/doc/examples/apply_ufunc_vectorize_1d.ipynb index de8aa397869..7ca8300d0a6 100644 --- a/doc/examples/apply_ufunc_vectorize_1d.ipynb +++ b/doc/examples/apply_ufunc_vectorize_1d.ipynb @@ -520,9 +520,10 @@ "\n", "interped = xr.apply_ufunc(\n", " interp1d_np, # first the function\n", - " air.chunk(\n", - " {\"time\": 2, \"lon\": 2}\n", - " ), # now arguments in the order expected by 'interp1_np'\n", + " air.chunk({\n", + " \"time\": 2,\n", + " \"lon\": 2,\n", + " }), # now arguments in the order expected by 'interp1_np'\n", " air.lat, # as above\n", " newlat, # as above\n", " input_core_dims=[[\"lat\"], [\"lat\"], [\"new_lat\"]], # list with one entry per arg\n", @@ -617,9 +618,10 @@ "source": [ "interped = xr.apply_ufunc(\n", " interp1d_np_gufunc, # first the function\n", - " air.chunk(\n", - " {\"time\": 2, \"lon\": 2}\n", - " ), # now arguments in the order expected by 'interp1_np'\n", + " air.chunk({\n", + " \"time\": 2,\n", + " \"lon\": 2,\n", + " }), # now arguments in the order expected by 'interp1_np'\n", " air.lat, # as above\n", " newlat, # as above\n", " input_core_dims=[[\"lat\"], [\"lat\"], [\"new_lat\"]], # list with one entry per arg\n", diff --git a/pyproject.toml b/pyproject.toml index 6ad5361fd64..ea669328ace 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -288,6 +288,7 @@ convention = "numpy" [tool.ruff.format] docstring-code-format = true +preview = true [tool.pytest.ini_options] addopts = ["--strict-config", "--strict-markers"] diff --git a/xarray/backends/api.py b/xarray/backends/api.py index 18347cafe1e..c685aada284 100644 --- a/xarray/backends/api.py +++ b/xarray/backends/api.py @@ -1132,8 +1132,7 @@ def to_netcdf( *, multifile: Literal[True], invalid_netcdf: bool = False, -) -> tuple[ArrayWriter, AbstractDataStore]: - ... +) -> tuple[ArrayWriter, AbstractDataStore]: ... # path=None writes to bytes @@ -1150,8 +1149,7 @@ def to_netcdf( compute: bool = True, multifile: Literal[False] = False, invalid_netcdf: bool = False, -) -> bytes: - ... +) -> bytes: ... # compute=False returns dask.Delayed @@ -1169,8 +1167,7 @@ def to_netcdf( compute: Literal[False], multifile: Literal[False] = False, invalid_netcdf: bool = False, -) -> Delayed: - ... +) -> Delayed: ... 
# default return None @@ -1187,8 +1184,7 @@ def to_netcdf( compute: Literal[True] = True, multifile: Literal[False] = False, invalid_netcdf: bool = False, -) -> None: - ... +) -> None: ... # if compute cannot be evaluated at type check time @@ -1206,8 +1202,7 @@ def to_netcdf( compute: bool = False, multifile: Literal[False] = False, invalid_netcdf: bool = False, -) -> Delayed | None: - ... +) -> Delayed | None: ... # if multifile cannot be evaluated at type check time @@ -1225,8 +1220,7 @@ def to_netcdf( compute: bool = False, multifile: bool = False, invalid_netcdf: bool = False, -) -> tuple[ArrayWriter, AbstractDataStore] | Delayed | None: - ... +) -> tuple[ArrayWriter, AbstractDataStore] | Delayed | None: ... # Any @@ -1243,8 +1237,7 @@ def to_netcdf( compute: bool = False, multifile: bool = False, invalid_netcdf: bool = False, -) -> tuple[ArrayWriter, AbstractDataStore] | bytes | Delayed | None: - ... +) -> tuple[ArrayWriter, AbstractDataStore] | bytes | Delayed | None: ... def to_netcdf( @@ -1499,22 +1492,20 @@ def save_mfdataset( "save_mfdataset" ) - writers, stores = zip( - *[ - to_netcdf( - ds, - path, - mode, - format, - group, - engine, - compute=compute, - multifile=True, - **kwargs, - ) - for ds, path, group in zip(datasets, paths, groups) - ] - ) + writers, stores = zip(*[ + to_netcdf( + ds, + path, + mode, + format, + group, + engine, + compute=compute, + multifile=True, + **kwargs, + ) + for ds, path, group in zip(datasets, paths, groups) + ]) try: writes = [w.sync(compute=compute) for w in writers] @@ -1526,9 +1517,9 @@ def save_mfdataset( if not compute: import dask - return dask.delayed( - [dask.delayed(_finalize_store)(w, s) for w, s in zip(writes, stores)] - ) + return dask.delayed([ + dask.delayed(_finalize_store)(w, s) for w, s in zip(writes, stores) + ]) def _auto_detect_region(ds_new, ds_orig, dim): @@ -1678,8 +1669,7 @@ def to_zarr( zarr_version: int | None = None, write_empty_chunks: bool | None = None, chunkmanager_store_kwargs: dict[str, Any] | None = None, -) -> backends.ZarrStore: - ... +) -> backends.ZarrStore: ... # compute=False returns dask.Delayed @@ -1702,8 +1692,7 @@ def to_zarr( zarr_version: int | None = None, write_empty_chunks: bool | None = None, chunkmanager_store_kwargs: dict[str, Any] | None = None, -) -> Delayed: - ... +) -> Delayed: ... def to_zarr( diff --git a/xarray/backends/locks.py b/xarray/backends/locks.py index 045ee522fa8..69cef309b45 100644 --- a/xarray/backends/locks.py +++ b/xarray/backends/locks.py @@ -40,9 +40,9 @@ class SerializableLock: The creation of locks is itself not threadsafe. 
""" - _locks: ClassVar[ - WeakValueDictionary[Hashable, threading.Lock] - ] = WeakValueDictionary() + _locks: ClassVar[WeakValueDictionary[Hashable, threading.Lock]] = ( + WeakValueDictionary() + ) token: Hashable lock: threading.Lock diff --git a/xarray/backends/plugins.py b/xarray/backends/plugins.py index 6c7d7e10055..bcdcaf09195 100644 --- a/xarray/backends/plugins.py +++ b/xarray/backends/plugins.py @@ -97,9 +97,9 @@ def sort_backends( for be_name in STANDARD_BACKENDS_ORDER: if backend_entrypoints.get(be_name, None) is not None: ordered_backends_entrypoints[be_name] = backend_entrypoints.pop(be_name) - ordered_backends_entrypoints.update( - {name: backend_entrypoints[name] for name in sorted(backend_entrypoints)} - ) + ordered_backends_entrypoints.update({ + name: backend_entrypoints[name] for name in sorted(backend_entrypoints) + }) return ordered_backends_entrypoints diff --git a/xarray/backends/pydap_.py b/xarray/backends/pydap_.py index 5a475a7c3be..ad63b4bca2b 100644 --- a/xarray/backends/pydap_.py +++ b/xarray/backends/pydap_.py @@ -72,12 +72,10 @@ def _fix_attributes(attributes): elif is_dict_like(attributes[k]): # Make Hierarchical attributes to a single level with a # dot-separated key - attributes.update( - { - f"{k}.{k_child}": v_child - for k_child, v_child in attributes.pop(k).items() - } - ) + attributes.update({ + f"{k}.{k_child}": v_child + for k_child, v_child in attributes.pop(k).items() + }) return attributes diff --git a/xarray/backends/zarr.py b/xarray/backends/zarr.py index ac208da097a..0cac072818f 100644 --- a/xarray/backends/zarr.py +++ b/xarray/backends/zarr.py @@ -72,9 +72,9 @@ def __init__(self, zarr_array): self.shape = self._array.shape # preserve vlen string object dtype (GH 7328) - if self._array.filters is not None and any( - [filt.codec_id == "vlen-utf8" for filt in self._array.filters] - ): + if self._array.filters is not None and any([ + filt.codec_id == "vlen-utf8" for filt in self._array.filters + ]): dtype = coding.strings.create_vlen_dtype(str) else: dtype = self._array.dtype diff --git a/xarray/coding/cftimeindex.py b/xarray/coding/cftimeindex.py index 6898809e3b0..a1e2eb0e5e9 100644 --- a/xarray/coding/cftimeindex.py +++ b/xarray/coding/cftimeindex.py @@ -807,9 +807,9 @@ def _parse_array_of_cftime_strings(strings, date_type): ------- np.array """ - return np.array( - [_parse_iso8601_without_reso(date_type, s) for s in strings.ravel()] - ).reshape(strings.shape) + return np.array([ + _parse_iso8601_without_reso(date_type, s) for s in strings.ravel() + ]).reshape(strings.shape) def _contains_datetime_timedeltas(array): diff --git a/xarray/coding/times.py b/xarray/coding/times.py index 92bce0abeaa..08f540bee4b 100644 --- a/xarray/coding/times.py +++ b/xarray/coding/times.py @@ -74,17 +74,15 @@ _NETCDF_TIME_UNITS_NUMPY = _NETCDF_TIME_UNITS_CFTIME + ["nanoseconds"] -TIME_UNITS = frozenset( - [ - "days", - "hours", - "minutes", - "seconds", - "milliseconds", - "microseconds", - "nanoseconds", - ] -) +TIME_UNITS = frozenset([ + "days", + "hours", + "minutes", + "seconds", + "milliseconds", + "microseconds", + "nanoseconds", +]) def _is_standard_calendar(calendar: str) -> bool: @@ -208,9 +206,10 @@ def _decode_cf_datetime_dtype( # successfully. Otherwise, tracebacks end up swallowed by # Dataset.__repr__ when users try to view their lazily decoded array. 
values = indexing.ImplicitToExplicitIndexingAdapter(indexing.as_indexable(data)) - example_value = np.concatenate( - [first_n_items(values, 1) or [0], last_item(values) or [0]] - ) + example_value = np.concatenate([ + first_n_items(values, 1) or [0], + last_item(values) or [0], + ]) try: result = decode_cf_datetime(example_value, units, calendar, use_cftime) diff --git a/xarray/convert.py b/xarray/convert.py index b8d81ccf9f0..14df7cadb9b 100644 --- a/xarray/convert.py +++ b/xarray/convert.py @@ -1,5 +1,4 @@ -"""Functions for converting to and from xarray objects -""" +"""Functions for converting to and from xarray objects""" from collections import Counter diff --git a/xarray/core/_typed_ops.py b/xarray/core/_typed_ops.py index cb172223293..c1748e322c2 100644 --- a/xarray/core/_typed_ops.py +++ b/xarray/core/_typed_ops.py @@ -455,199 +455,163 @@ def _binary_op( raise NotImplementedError @overload - def __add__(self, other: T_DataArray) -> T_DataArray: - ... + def __add__(self, other: T_DataArray) -> T_DataArray: ... @overload - def __add__(self, other: VarCompatible) -> Self: - ... + def __add__(self, other: VarCompatible) -> Self: ... def __add__(self, other: VarCompatible) -> Self | T_DataArray: return self._binary_op(other, operator.add) @overload - def __sub__(self, other: T_DataArray) -> T_DataArray: - ... + def __sub__(self, other: T_DataArray) -> T_DataArray: ... @overload - def __sub__(self, other: VarCompatible) -> Self: - ... + def __sub__(self, other: VarCompatible) -> Self: ... def __sub__(self, other: VarCompatible) -> Self | T_DataArray: return self._binary_op(other, operator.sub) @overload - def __mul__(self, other: T_DataArray) -> T_DataArray: - ... + def __mul__(self, other: T_DataArray) -> T_DataArray: ... @overload - def __mul__(self, other: VarCompatible) -> Self: - ... + def __mul__(self, other: VarCompatible) -> Self: ... def __mul__(self, other: VarCompatible) -> Self | T_DataArray: return self._binary_op(other, operator.mul) @overload - def __pow__(self, other: T_DataArray) -> T_DataArray: - ... + def __pow__(self, other: T_DataArray) -> T_DataArray: ... @overload - def __pow__(self, other: VarCompatible) -> Self: - ... + def __pow__(self, other: VarCompatible) -> Self: ... def __pow__(self, other: VarCompatible) -> Self | T_DataArray: return self._binary_op(other, operator.pow) @overload - def __truediv__(self, other: T_DataArray) -> T_DataArray: - ... + def __truediv__(self, other: T_DataArray) -> T_DataArray: ... @overload - def __truediv__(self, other: VarCompatible) -> Self: - ... + def __truediv__(self, other: VarCompatible) -> Self: ... def __truediv__(self, other: VarCompatible) -> Self | T_DataArray: return self._binary_op(other, operator.truediv) @overload - def __floordiv__(self, other: T_DataArray) -> T_DataArray: - ... + def __floordiv__(self, other: T_DataArray) -> T_DataArray: ... @overload - def __floordiv__(self, other: VarCompatible) -> Self: - ... + def __floordiv__(self, other: VarCompatible) -> Self: ... def __floordiv__(self, other: VarCompatible) -> Self | T_DataArray: return self._binary_op(other, operator.floordiv) @overload - def __mod__(self, other: T_DataArray) -> T_DataArray: - ... + def __mod__(self, other: T_DataArray) -> T_DataArray: ... @overload - def __mod__(self, other: VarCompatible) -> Self: - ... + def __mod__(self, other: VarCompatible) -> Self: ... def __mod__(self, other: VarCompatible) -> Self | T_DataArray: return self._binary_op(other, operator.mod) @overload - def __and__(self, other: T_DataArray) -> T_DataArray: - ... 
+ def __and__(self, other: T_DataArray) -> T_DataArray: ... @overload - def __and__(self, other: VarCompatible) -> Self: - ... + def __and__(self, other: VarCompatible) -> Self: ... def __and__(self, other: VarCompatible) -> Self | T_DataArray: return self._binary_op(other, operator.and_) @overload - def __xor__(self, other: T_DataArray) -> T_DataArray: - ... + def __xor__(self, other: T_DataArray) -> T_DataArray: ... @overload - def __xor__(self, other: VarCompatible) -> Self: - ... + def __xor__(self, other: VarCompatible) -> Self: ... def __xor__(self, other: VarCompatible) -> Self | T_DataArray: return self._binary_op(other, operator.xor) @overload - def __or__(self, other: T_DataArray) -> T_DataArray: - ... + def __or__(self, other: T_DataArray) -> T_DataArray: ... @overload - def __or__(self, other: VarCompatible) -> Self: - ... + def __or__(self, other: VarCompatible) -> Self: ... def __or__(self, other: VarCompatible) -> Self | T_DataArray: return self._binary_op(other, operator.or_) @overload - def __lshift__(self, other: T_DataArray) -> T_DataArray: - ... + def __lshift__(self, other: T_DataArray) -> T_DataArray: ... @overload - def __lshift__(self, other: VarCompatible) -> Self: - ... + def __lshift__(self, other: VarCompatible) -> Self: ... def __lshift__(self, other: VarCompatible) -> Self | T_DataArray: return self._binary_op(other, operator.lshift) @overload - def __rshift__(self, other: T_DataArray) -> T_DataArray: - ... + def __rshift__(self, other: T_DataArray) -> T_DataArray: ... @overload - def __rshift__(self, other: VarCompatible) -> Self: - ... + def __rshift__(self, other: VarCompatible) -> Self: ... def __rshift__(self, other: VarCompatible) -> Self | T_DataArray: return self._binary_op(other, operator.rshift) @overload - def __lt__(self, other: T_DataArray) -> T_DataArray: - ... + def __lt__(self, other: T_DataArray) -> T_DataArray: ... @overload - def __lt__(self, other: VarCompatible) -> Self: - ... + def __lt__(self, other: VarCompatible) -> Self: ... def __lt__(self, other: VarCompatible) -> Self | T_DataArray: return self._binary_op(other, operator.lt) @overload - def __le__(self, other: T_DataArray) -> T_DataArray: - ... + def __le__(self, other: T_DataArray) -> T_DataArray: ... @overload - def __le__(self, other: VarCompatible) -> Self: - ... + def __le__(self, other: VarCompatible) -> Self: ... def __le__(self, other: VarCompatible) -> Self | T_DataArray: return self._binary_op(other, operator.le) @overload - def __gt__(self, other: T_DataArray) -> T_DataArray: - ... + def __gt__(self, other: T_DataArray) -> T_DataArray: ... @overload - def __gt__(self, other: VarCompatible) -> Self: - ... + def __gt__(self, other: VarCompatible) -> Self: ... def __gt__(self, other: VarCompatible) -> Self | T_DataArray: return self._binary_op(other, operator.gt) @overload - def __ge__(self, other: T_DataArray) -> T_DataArray: - ... + def __ge__(self, other: T_DataArray) -> T_DataArray: ... @overload - def __ge__(self, other: VarCompatible) -> Self: - ... + def __ge__(self, other: VarCompatible) -> Self: ... def __ge__(self, other: VarCompatible) -> Self | T_DataArray: return self._binary_op(other, operator.ge) @overload # type:ignore[override] - def __eq__(self, other: T_DataArray) -> T_DataArray: - ... + def __eq__(self, other: T_DataArray) -> T_DataArray: ... @overload - def __eq__(self, other: VarCompatible) -> Self: - ... + def __eq__(self, other: VarCompatible) -> Self: ... 
def __eq__(self, other: VarCompatible) -> Self | T_DataArray: return self._binary_op(other, nputils.array_eq) @overload # type:ignore[override] - def __ne__(self, other: T_DataArray) -> T_DataArray: - ... + def __ne__(self, other: T_DataArray) -> T_DataArray: ... @overload - def __ne__(self, other: VarCompatible) -> Self: - ... + def __ne__(self, other: VarCompatible) -> Self: ... def __ne__(self, other: VarCompatible) -> Self | T_DataArray: return self._binary_op(other, nputils.array_ne) diff --git a/xarray/core/accessor_dt.py b/xarray/core/accessor_dt.py index 41b982d268b..38a1aaac4b9 100644 --- a/xarray/core/accessor_dt.py +++ b/xarray/core/accessor_dt.py @@ -84,12 +84,10 @@ def _access_through_series(values, name): # test for and apply needed dtype hasna = any(field_values.year.isnull()) if hasna: - field_values = np.dstack( - [ - getattr(field_values, name).astype(np.float64, copy=False).values - for name in ["year", "week", "day"] - ] - ) + field_values = np.dstack([ + getattr(field_values, name).astype(np.float64, copy=False).values + for name in ["year", "week", "day"] + ]) else: field_values = np.array(field_values, dtype=np.int64) # isocalendar returns iso- year, week, and weekday -> reshape diff --git a/xarray/core/alignment.py b/xarray/core/alignment.py index 0339bddb904..13e3400d170 100644 --- a/xarray/core/alignment.py +++ b/xarray/core/alignment.py @@ -599,8 +599,7 @@ def align( indexes=None, exclude: str | Iterable[Hashable] = frozenset(), fill_value=dtypes.NA, -) -> tuple[T_Obj1]: - ... +) -> tuple[T_Obj1]: ... @overload @@ -614,8 +613,7 @@ def align( indexes=None, exclude: str | Iterable[Hashable] = frozenset(), fill_value=dtypes.NA, -) -> tuple[T_Obj1, T_Obj2]: - ... +) -> tuple[T_Obj1, T_Obj2]: ... @overload @@ -630,8 +628,7 @@ def align( indexes=None, exclude: str | Iterable[Hashable] = frozenset(), fill_value=dtypes.NA, -) -> tuple[T_Obj1, T_Obj2, T_Obj3]: - ... +) -> tuple[T_Obj1, T_Obj2, T_Obj3]: ... @overload @@ -647,8 +644,7 @@ def align( indexes=None, exclude: str | Iterable[Hashable] = frozenset(), fill_value=dtypes.NA, -) -> tuple[T_Obj1, T_Obj2, T_Obj3, T_Obj4]: - ... +) -> tuple[T_Obj1, T_Obj2, T_Obj3, T_Obj4]: ... @overload @@ -665,8 +661,7 @@ def align( indexes=None, exclude: str | Iterable[Hashable] = frozenset(), fill_value=dtypes.NA, -) -> tuple[T_Obj1, T_Obj2, T_Obj3, T_Obj4, T_Obj5]: - ... +) -> tuple[T_Obj1, T_Obj2, T_Obj3, T_Obj4, T_Obj5]: ... @overload @@ -677,8 +672,7 @@ def align( indexes=None, exclude: str | Iterable[Hashable] = frozenset(), fill_value=dtypes.NA, -) -> tuple[T_Alignable, ...]: - ... +) -> tuple[T_Alignable, ...]: ... def align( @@ -1096,15 +1090,13 @@ def _broadcast_dataset(ds: T_Dataset) -> T_Dataset: @overload def broadcast( obj1: T_Obj1, /, *, exclude: str | Iterable[Hashable] | None = None -) -> tuple[T_Obj1]: - ... +) -> tuple[T_Obj1]: ... @overload def broadcast( obj1: T_Obj1, obj2: T_Obj2, /, *, exclude: str | Iterable[Hashable] | None = None -) -> tuple[T_Obj1, T_Obj2]: - ... +) -> tuple[T_Obj1, T_Obj2]: ... @overload @@ -1115,8 +1107,7 @@ def broadcast( /, *, exclude: str | Iterable[Hashable] | None = None, -) -> tuple[T_Obj1, T_Obj2, T_Obj3]: - ... +) -> tuple[T_Obj1, T_Obj2, T_Obj3]: ... @overload @@ -1128,8 +1119,7 @@ def broadcast( /, *, exclude: str | Iterable[Hashable] | None = None, -) -> tuple[T_Obj1, T_Obj2, T_Obj3, T_Obj4]: - ... +) -> tuple[T_Obj1, T_Obj2, T_Obj3, T_Obj4]: ... 
@overload @@ -1142,15 +1132,13 @@ def broadcast( /, *, exclude: str | Iterable[Hashable] | None = None, -) -> tuple[T_Obj1, T_Obj2, T_Obj3, T_Obj4, T_Obj5]: - ... +) -> tuple[T_Obj1, T_Obj2, T_Obj3, T_Obj4, T_Obj5]: ... @overload def broadcast( *args: T_Alignable, exclude: str | Iterable[Hashable] | None = None -) -> tuple[T_Alignable, ...]: - ... +) -> tuple[T_Alignable, ...]: ... def broadcast( diff --git a/xarray/core/common.py b/xarray/core/common.py index d5d08fa8aa6..a95217c62d6 100644 --- a/xarray/core/common.py +++ b/xarray/core/common.py @@ -200,12 +200,10 @@ def __iter__(self: Any) -> Iterator[Any]: return self._iter() @overload - def get_axis_num(self, dim: Iterable[Hashable]) -> tuple[int, ...]: - ... + def get_axis_num(self, dim: Iterable[Hashable]) -> tuple[int, ...]: ... @overload - def get_axis_num(self, dim: Hashable) -> int: - ... + def get_axis_num(self, dim: Hashable) -> int: ... def get_axis_num(self, dim: Hashable | Iterable[Hashable]) -> int | tuple[int, ...]: """Return axis number(s) corresponding to dimension(s) in this array. @@ -1500,8 +1498,7 @@ def full_like( chunks: T_Chunks = None, chunked_array_type: str | None = None, from_array_kwargs: dict[str, Any] | None = None, -) -> DataArray: - ... +) -> DataArray: ... @overload @@ -1513,8 +1510,7 @@ def full_like( chunks: T_Chunks = None, chunked_array_type: str | None = None, from_array_kwargs: dict[str, Any] | None = None, -) -> Dataset: - ... +) -> Dataset: ... @overload @@ -1526,8 +1522,7 @@ def full_like( chunks: T_Chunks = None, chunked_array_type: str | None = None, from_array_kwargs: dict[str, Any] | None = None, -) -> Variable: - ... +) -> Variable: ... @overload @@ -1539,8 +1534,7 @@ def full_like( chunks: T_Chunks = {}, chunked_array_type: str | None = None, from_array_kwargs: dict[str, Any] | None = None, -) -> Dataset | DataArray: - ... +) -> Dataset | DataArray: ... @overload @@ -1552,8 +1546,7 @@ def full_like( chunks: T_Chunks = None, chunked_array_type: str | None = None, from_array_kwargs: dict[str, Any] | None = None, -) -> Dataset | DataArray | Variable: - ... +) -> Dataset | DataArray | Variable: ... def full_like( @@ -1794,8 +1787,7 @@ def zeros_like( chunks: T_Chunks = None, chunked_array_type: str | None = None, from_array_kwargs: dict[str, Any] | None = None, -) -> DataArray: - ... +) -> DataArray: ... @overload @@ -1806,8 +1798,7 @@ def zeros_like( chunks: T_Chunks = None, chunked_array_type: str | None = None, from_array_kwargs: dict[str, Any] | None = None, -) -> Dataset: - ... +) -> Dataset: ... @overload @@ -1818,8 +1809,7 @@ def zeros_like( chunks: T_Chunks = None, chunked_array_type: str | None = None, from_array_kwargs: dict[str, Any] | None = None, -) -> Variable: - ... +) -> Variable: ... @overload @@ -1830,8 +1820,7 @@ def zeros_like( chunks: T_Chunks = None, chunked_array_type: str | None = None, from_array_kwargs: dict[str, Any] | None = None, -) -> Dataset | DataArray: - ... +) -> Dataset | DataArray: ... @overload @@ -1842,8 +1831,7 @@ def zeros_like( chunks: T_Chunks = None, chunked_array_type: str | None = None, from_array_kwargs: dict[str, Any] | None = None, -) -> Dataset | DataArray | Variable: - ... +) -> Dataset | DataArray | Variable: ... def zeros_like( @@ -1936,8 +1924,7 @@ def ones_like( chunks: T_Chunks = None, chunked_array_type: str | None = None, from_array_kwargs: dict[str, Any] | None = None, -) -> DataArray: - ... +) -> DataArray: ... 
@overload @@ -1948,8 +1935,7 @@ def ones_like( chunks: T_Chunks = None, chunked_array_type: str | None = None, from_array_kwargs: dict[str, Any] | None = None, -) -> Dataset: - ... +) -> Dataset: ... @overload @@ -1960,8 +1946,7 @@ def ones_like( chunks: T_Chunks = None, chunked_array_type: str | None = None, from_array_kwargs: dict[str, Any] | None = None, -) -> Variable: - ... +) -> Variable: ... @overload @@ -1972,8 +1957,7 @@ def ones_like( chunks: T_Chunks = None, chunked_array_type: str | None = None, from_array_kwargs: dict[str, Any] | None = None, -) -> Dataset | DataArray: - ... +) -> Dataset | DataArray: ... @overload @@ -1984,8 +1968,7 @@ def ones_like( chunks: T_Chunks = None, chunked_array_type: str | None = None, from_array_kwargs: dict[str, Any] | None = None, -) -> Dataset | DataArray | Variable: - ... +) -> Dataset | DataArray | Variable: ... def ones_like( diff --git a/xarray/core/computation.py b/xarray/core/computation.py index 72bd8fe59b4..f29f6c4dd35 100644 --- a/xarray/core/computation.py +++ b/xarray/core/computation.py @@ -2050,29 +2050,25 @@ def where(cond, x, y, keep_attrs=None): @overload def polyval( coord: DataArray, coeffs: DataArray, degree_dim: Hashable = "degree" -) -> DataArray: - ... +) -> DataArray: ... @overload def polyval( coord: DataArray, coeffs: Dataset, degree_dim: Hashable = "degree" -) -> Dataset: - ... +) -> Dataset: ... @overload def polyval( coord: Dataset, coeffs: DataArray, degree_dim: Hashable = "degree" -) -> Dataset: - ... +) -> Dataset: ... @overload def polyval( coord: Dataset, coeffs: Dataset, degree_dim: Hashable = "degree" -) -> Dataset: - ... +) -> Dataset: ... @overload @@ -2080,8 +2076,7 @@ def polyval( coord: Dataset | DataArray, coeffs: Dataset | DataArray, degree_dim: Hashable = "degree", -) -> Dataset | DataArray: - ... +) -> Dataset | DataArray: ... def polyval( @@ -2250,23 +2245,19 @@ def _calc_idxminmax( @overload -def unify_chunks(__obj: _T) -> tuple[_T]: - ... +def unify_chunks(__obj: _T) -> tuple[_T]: ... @overload -def unify_chunks(__obj1: _T, __obj2: _U) -> tuple[_T, _U]: - ... +def unify_chunks(__obj1: _T, __obj2: _U) -> tuple[_T, _U]: ... @overload -def unify_chunks(__obj1: _T, __obj2: _U, __obj3: _V) -> tuple[_T, _U, _V]: - ... +def unify_chunks(__obj1: _T, __obj2: _U, __obj3: _V) -> tuple[_T, _U, _V]: ... @overload -def unify_chunks(*objects: Dataset | DataArray) -> tuple[Dataset | DataArray, ...]: - ... +def unify_chunks(*objects: Dataset | DataArray) -> tuple[Dataset | DataArray, ...]: ... def unify_chunks(*objects: Dataset | DataArray) -> tuple[Dataset | DataArray, ...]: diff --git a/xarray/core/concat.py b/xarray/core/concat.py index 8411e136cc2..d95cbccd36a 100644 --- a/xarray/core/concat.py +++ b/xarray/core/concat.py @@ -42,8 +42,7 @@ def concat( fill_value: object = dtypes.NA, join: JoinOptions = "outer", combine_attrs: CombineAttrsOptions = "override", -) -> T_Dataset: - ... +) -> T_Dataset: ... @overload @@ -57,8 +56,7 @@ def concat( fill_value: object = dtypes.NA, join: JoinOptions = "outer", combine_attrs: CombineAttrsOptions = "override", -) -> T_DataArray: - ... +) -> T_DataArray: ... 
def concat( diff --git a/xarray/core/coordinates.py b/xarray/core/coordinates.py index 2adc4527285..64dbe505e8f 100644 --- a/xarray/core/coordinates.py +++ b/xarray/core/coordinates.py @@ -716,19 +716,17 @@ def dtypes(self) -> Frozen[Hashable, np.dtype]: -------- Dataset.dtypes """ - return Frozen( - { - n: v.dtype - for n, v in self._data._variables.items() - if n in self._data._coord_names - } - ) + return Frozen({ + n: v.dtype + for n, v in self._data._variables.items() + if n in self._data._coord_names + }) @property def variables(self) -> Mapping[Hashable, Variable]: - return Frozen( - {k: v for k, v in self._data.variables.items() if k in self._names} - ) + return Frozen({ + k: v for k, v in self._data.variables.items() if k in self._names + }) def __getitem__(self, key: Hashable) -> DataArray: if key in self._data.data_vars: diff --git a/xarray/core/dataarray.py b/xarray/core/dataarray.py index 84dc6eeaa22..1a17a3cacd9 100644 --- a/xarray/core/dataarray.py +++ b/xarray/core/dataarray.py @@ -972,8 +972,7 @@ def reset_coords( names: Dims = None, *, drop: Literal[False] = False, - ) -> Dataset: - ... + ) -> Dataset: ... @overload def reset_coords( @@ -981,8 +980,7 @@ def reset_coords( names: Dims = None, *, drop: Literal[True], - ) -> Self: - ... + ) -> Self: ... @_deprecate_positional_args("v2023.10.0") def reset_coords( @@ -3560,15 +3558,13 @@ def ffill(self, dim: Hashable, limit: int | None = None) -> Self: Examples -------- - >>> temperature = np.array( - ... [ - ... [np.nan, 1, 3], - ... [0, np.nan, 5], - ... [5, np.nan, np.nan], - ... [3, np.nan, np.nan], - ... [0, 2, 0], - ... ] - ... ) + >>> temperature = np.array([ + ... [np.nan, 1, 3], + ... [0, np.nan, 5], + ... [5, np.nan, np.nan], + ... [3, np.nan, np.nan], + ... [0, 2, 0], + ... ]) >>> da = xr.DataArray( ... data=temperature, ... dims=["Y", "X"], @@ -3644,15 +3640,13 @@ def bfill(self, dim: Hashable, limit: int | None = None) -> Self: Examples -------- - >>> temperature = np.array( - ... [ - ... [0, 1, 3], - ... [0, np.nan, 5], - ... [5, np.nan, np.nan], - ... [3, np.nan, np.nan], - ... [np.nan, 2, 0], - ... ] - ... ) + >>> temperature = np.array([ + ... [0, 1, 3], + ... [0, np.nan, 5], + ... [5, np.nan, np.nan], + ... [3, np.nan, np.nan], + ... [np.nan, 2, 0], + ... ]) >>> da = xr.DataArray( ... data=temperature, ... dims=["Y", "X"], @@ -3916,8 +3910,7 @@ def to_netcdf( unlimited_dims: Iterable[Hashable] | None = None, compute: bool = True, invalid_netcdf: bool = False, - ) -> bytes: - ... + ) -> bytes: ... # compute=False returns dask.Delayed @overload @@ -3933,8 +3926,7 @@ def to_netcdf( *, compute: Literal[False], invalid_netcdf: bool = False, - ) -> Delayed: - ... + ) -> Delayed: ... # default return None @overload @@ -3949,8 +3941,7 @@ def to_netcdf( unlimited_dims: Iterable[Hashable] | None = None, compute: Literal[True] = True, invalid_netcdf: bool = False, - ) -> None: - ... + ) -> None: ... # if compute cannot be evaluated at type check time # we may get back either Delayed or None @@ -3966,8 +3957,7 @@ def to_netcdf( unlimited_dims: Iterable[Hashable] | None = None, compute: bool = True, invalid_netcdf: bool = False, - ) -> Delayed | None: - ... + ) -> Delayed | None: ... def to_netcdf( self, @@ -4116,8 +4106,7 @@ def to_zarr( safe_chunks: bool = True, storage_options: dict[str, str] | None = None, zarr_version: int | None = None, - ) -> ZarrStore: - ... + ) -> ZarrStore: ... 
# compute=False returns dask.Delayed @overload @@ -4137,8 +4126,7 @@ def to_zarr( safe_chunks: bool = True, storage_options: dict[str, str] | None = None, zarr_version: int | None = None, - ) -> Delayed: - ... + ) -> Delayed: ... def to_zarr( self, @@ -6319,13 +6307,11 @@ def curvefit( ... return np.exp(-t / time_constant) * amplitude >>> t = np.arange(11) >>> da = xr.DataArray( - ... np.stack( - ... [ - ... exp_decay(t, 1, 0.1), - ... exp_decay(t, 2, 0.2), - ... exp_decay(t, 3, 0.3), - ... ] - ... ) + ... np.stack([ + ... exp_decay(t, 1, 0.1), + ... exp_decay(t, 2, 0.2), + ... exp_decay(t, 3, 0.3), + ... ]) ... + rng.normal(size=(3, t.size)) * 0.01, ... coords={"x": [0, 1, 2], "time": t}, ... ) diff --git a/xarray/core/dataset.py b/xarray/core/dataset.py index e58f4656783..a06f1986be2 100644 --- a/xarray/core/dataset.py +++ b/xarray/core/dataset.py @@ -358,9 +358,9 @@ def _get_func_args(func, param_names): params = param_names else: params = list(func_args)[1:] - if any( - [(p.kind in [p.VAR_POSITIONAL, p.VAR_KEYWORD]) for p in func_args.values()] - ): + if any([ + (p.kind in [p.VAR_POSITIONAL, p.VAR_KEYWORD]) for p in func_args.values() + ]): raise ValueError( "`param_names` must be provided because `func` takes variable length arguments." ) @@ -807,13 +807,9 @@ def dtypes(self) -> Frozen[Hashable, np.dtype]: -------- DataArray.dtype """ - return Frozen( - { - n: v.dtype - for n, v in self._variables.items() - if n not in self._coord_names - } - ) + return Frozen({ + n: v.dtype for n, v in self._variables.items() if n not in self._coord_names + }) def load(self, **kwargs) -> Self: """Manually trigger loading and/or computation of this dataset's data @@ -859,9 +855,12 @@ def load(self, **kwargs) -> Self: def __dask_tokenize__(self): from dask.base import normalize_token - return normalize_token( - (type(self), self._variables, self._coord_names, self._attrs) - ) + return normalize_token(( + type(self), + self._variables, + self._coord_names, + self._attrs, + )) def __dask_graph__(self): graphs = {k: v.__dask_graph__() for k, v in self.variables.items()} @@ -1522,13 +1521,11 @@ def loc(self) -> _LocIndexer[Self]: return _LocIndexer(self) @overload - def __getitem__(self, key: Hashable) -> DataArray: - ... + def __getitem__(self, key: Hashable) -> DataArray: ... # Mapping is Iterable @overload - def __getitem__(self, key: Iterable[Hashable]) -> Self: - ... + def __getitem__(self, key: Iterable[Hashable]) -> Self: ... def __getitem__( self, key: Mapping[Any, Any] | Hashable | Iterable[Hashable] @@ -1988,12 +1985,10 @@ def set_coords(self, names: Hashable | Iterable[Hashable]) -> Self: Examples -------- - >>> dataset = xr.Dataset( - ... { - ... "pressure": ("time", [1.013, 1.2, 3.5]), - ... "time": pd.date_range("2023-01-01", periods=3), - ... } - ... ) + >>> dataset = xr.Dataset({ + ... "pressure": ("time", [1.013, 1.2, 3.5]), + ... "time": pd.date_range("2023-01-01", periods=3), + ... }) >>> dataset Size: 48B Dimensions: (time: 3) @@ -2153,8 +2148,7 @@ def to_netcdf( unlimited_dims: Iterable[Hashable] | None = None, compute: bool = True, invalid_netcdf: bool = False, - ) -> bytes: - ... + ) -> bytes: ... # compute=False returns dask.Delayed @overload @@ -2170,8 +2164,7 @@ def to_netcdf( *, compute: Literal[False], invalid_netcdf: bool = False, - ) -> Delayed: - ... + ) -> Delayed: ... # default return None @overload @@ -2186,8 +2179,7 @@ def to_netcdf( unlimited_dims: Iterable[Hashable] | None = None, compute: Literal[True] = True, invalid_netcdf: bool = False, - ) -> None: - ... 
+ ) -> None: ... # if compute cannot be evaluated at type check time # we may get back either Delayed or None @@ -2203,8 +2195,7 @@ def to_netcdf( unlimited_dims: Iterable[Hashable] | None = None, compute: bool = True, invalid_netcdf: bool = False, - ) -> Delayed | None: - ... + ) -> Delayed | None: ... def to_netcdf( self, @@ -2335,8 +2326,7 @@ def to_zarr( zarr_version: int | None = None, write_empty_chunks: bool | None = None, chunkmanager_store_kwargs: dict[str, Any] | None = None, - ) -> ZarrStore: - ... + ) -> ZarrStore: ... # compute=False returns dask.Delayed @overload @@ -2358,8 +2348,7 @@ def to_zarr( zarr_version: int | None = None, write_empty_chunks: bool | None = None, chunkmanager_store_kwargs: dict[str, Any] | None = None, - ) -> Delayed: - ... + ) -> Delayed: ... def to_zarr( self, @@ -4188,12 +4177,10 @@ def _rename_indexes( new_index = index.rename(name_dict, dims_dict) new_coord_names = [name_dict.get(k, k) for k in coord_names] indexes.update({k: new_index for k in new_coord_names}) - new_index_vars = new_index.create_variables( - { - new: self._variables[old] - for old, new in zip(coord_names, new_coord_names) - } - ) + new_index_vars = new_index.create_variables({ + new: self._variables[old] + for old, new in zip(coord_names, new_coord_names) + }) variables.update(new_index_vars) return indexes, variables @@ -4932,9 +4919,9 @@ def drop_or_convert(var_names): # multi-index reduced to single index # backward compatibility: unique level coordinate renamed to dimension drop_variables.update(keep_level_vars) - drop_or_convert( - [k for k in level_names if k not in keep_level_vars] - ) + drop_or_convert([ + k for k in level_names if k not in keep_level_vars + ]) else: # always drop the multi-index dimension variable drop_variables.add(index.dim) @@ -6610,9 +6597,18 @@ def ffill(self, dim: Hashable, limit: int | None = None) -> Self: Examples -------- >>> time = pd.date_range("2023-01-01", periods=10, freq="D") - >>> data = np.array( - ... [1, np.nan, np.nan, np.nan, 5, np.nan, np.nan, 8, np.nan, 10] - ... ) + >>> data = np.array([ + ... 1, + ... np.nan, + ... np.nan, + ... np.nan, + ... 5, + ... np.nan, + ... np.nan, + ... 8, + ... np.nan, + ... 10, + ... ]) >>> dataset = xr.Dataset({"data": (("time",), data)}, coords={"time": time}) >>> dataset Size: 160B @@ -6675,9 +6671,18 @@ def bfill(self, dim: Hashable, limit: int | None = None) -> Self: Examples -------- >>> time = pd.date_range("2023-01-01", periods=10, freq="D") - >>> data = np.array( - ... [1, np.nan, np.nan, np.nan, 5, np.nan, np.nan, 8, np.nan, 10] - ... ) + >>> data = np.array([ + ... 1, + ... np.nan, + ... np.nan, + ... np.nan, + ... 5, + ... np.nan, + ... np.nan, + ... 8, + ... np.nan, + ... 10, + ... 
]) >>> dataset = xr.Dataset({"data": (("time",), data)}, coords={"time": time}) >>> dataset Size: 160B @@ -7470,13 +7475,13 @@ def to_dict( "data_vars": {}, } for k in self.coords: - d["coords"].update( - {k: self[k].variable.to_dict(data=data, encoding=encoding)} - ) + d["coords"].update({ + k: self[k].variable.to_dict(data=data, encoding=encoding) + }) for k in self.data_vars: - d["data_vars"].update( - {k: self[k].variable.to_dict(data=data, encoding=encoding)} - ) + d["data_vars"].update({ + k: self[k].variable.to_dict(data=data, encoding=encoding) + }) if encoding: d["encoding"] = dict(self.encoding) return d @@ -8903,9 +8908,9 @@ def polyfit( stacked_coords: dict[Hashable, DataArray] = {} if dims_to_stack: stacked_dim = utils.get_temp_dimname(dims_to_stack, "stacked") - rhs = da.transpose(dim, *dims_to_stack).stack( - {stacked_dim: dims_to_stack} - ) + rhs = da.transpose(dim, *dims_to_stack).stack({ + stacked_dim: dims_to_stack + }) stacked_coords = {stacked_dim: rhs[stacked_dim]} scale_da = scale[:, np.newaxis] else: @@ -9573,9 +9578,10 @@ def eval( Examples -------- - >>> ds = xr.Dataset( - ... {"a": ("x", np.arange(0, 5, 1)), "b": ("x", np.linspace(0, 1, 5))} - ... ) + >>> ds = xr.Dataset({ + ... "a": ("x", np.arange(0, 5, 1)), + ... "b": ("x", np.linspace(0, 1, 5)), + ... }) >>> ds Size: 80B Dimensions: (x: 5) @@ -9903,9 +9909,9 @@ def _wrapper(Y, *args, **kwargs): name = f"{str(name)}_" input_core_dims = [reduce_dims_ for _ in range(n_coords + 1)] - input_core_dims.extend( - [[] for _ in range(3 * n_params)] - ) # core_dims for p0 and bounds + input_core_dims.extend([ + [] for _ in range(3 * n_params) + ]) # core_dims for p0 and bounds popt, pcov = apply_ufunc( _wrapper, @@ -9932,9 +9938,11 @@ def _wrapper(Y, *args, **kwargs): result[name + "curvefit_coefficients"] = popt result[name + "curvefit_covariance"] = pcov - result = result.assign_coords( - {"param": params, "cov_i": params, "cov_j": params} - ) + result = result.assign_coords({ + "param": params, + "cov_i": params, + "cov_j": params, + }) result.attrs = self.attrs.copy() return result diff --git a/xarray/core/extensions.py b/xarray/core/extensions.py index be7f0701a6b..e703924118b 100644 --- a/xarray/core/extensions.py +++ b/xarray/core/extensions.py @@ -108,9 +108,10 @@ def register_dataset_accessor(name): Back in an interactive IPython session: - >>> ds = xr.Dataset( - ... {"longitude": np.linspace(0, 10), "latitude": np.linspace(0, 20)} - ... ) + >>> ds = xr.Dataset({ + ... "longitude": np.linspace(0, 10), + ... "latitude": np.linspace(0, 20), + ... }) >>> ds.geo.center (10.0, 5.0) >>> ds.geo.plot() # plots data on a map diff --git a/xarray/core/formatting.py b/xarray/core/formatting.py index 260dabd9d31..a74ae92fdc0 100644 --- a/xarray/core/formatting.py +++ b/xarray/core/formatting.py @@ -1,5 +1,4 @@ -"""String formatting routines for __repr__. 
-""" +"""String formatting routines for __repr__.""" from __future__ import annotations @@ -241,13 +240,11 @@ def format_array_flat(array, max_width: int): num_back = count - num_front # note that num_back is 0 <--> array.size is 0 or 1 # <--> relevant_back_items is [] - pprint_str = "".join( - [ - " ".join(relevant_front_items[:num_front]), - padding, - " ".join(relevant_back_items[-num_back:]), - ] - ) + pprint_str = "".join([ + " ".join(relevant_front_items[:num_front]), + padding, + " ".join(relevant_back_items[-num_back:]), + ]) # As a final check, if it's still too long even with the limit in values, # replace the end with an ellipsis @@ -816,9 +813,9 @@ def extra_items_repr(extra_keys, mapping, ab_side, kwargs): b_attrs = b_mapping[k].attrs attrs_to_print = set(a_attrs) ^ set(b_attrs) - attrs_to_print.update( - {k for k in set(a_attrs) & set(b_attrs) if a_attrs[k] != b_attrs[k]} - ) + attrs_to_print.update({ + k for k in set(a_attrs) & set(b_attrs) if a_attrs[k] != b_attrs[k] + }) for m in (a_mapping, b_mapping): attr_s = "\n".join( " " + summarize_attr(ak, av) diff --git a/xarray/core/indexes.py b/xarray/core/indexes.py index 338d842177d..5bbbec9ee29 100644 --- a/xarray/core/indexes.py +++ b/xarray/core/indexes.py @@ -976,9 +976,9 @@ def concat( else: level_coords_dtype = {} for name in indexes[0].level_coords_dtype: - level_coords_dtype[name] = np.result_type( - *[idx.level_coords_dtype[name] for idx in indexes] - ) + level_coords_dtype[name] = np.result_type(*[ + idx.level_coords_dtype[name] for idx in indexes + ]) return cls(new_pd_index, dim=dim, level_coords_dtype=level_coords_dtype) @@ -1095,9 +1095,9 @@ def keep_levels( corresponding coordinates. """ - index = self.index.droplevel( - [k for k in self.index.names if k not in level_variables] - ) + index = self.index.droplevel([ + k for k in self.index.names if k not in level_variables + ]) if isinstance(index, pd.MultiIndex): level_coords_dtype = {k: self.level_coords_dtype[k] for k in index.names} diff --git a/xarray/core/merge.py b/xarray/core/merge.py index a689620e524..a0e6b1e2685 100644 --- a/xarray/core/merge.py +++ b/xarray/core/merge.py @@ -40,16 +40,14 @@ PANDAS_TYPES = (pd.Series, pd.DataFrame) -_VALID_COMPAT = Frozen( - { - "identical": 0, - "equals": 1, - "broadcast_equals": 2, - "minimal": 3, - "no_conflicts": 4, - "override": 5, - } -) +_VALID_COMPAT = Frozen({ + "identical": 0, + "equals": 1, + "broadcast_equals": 2, + "minimal": 3, + "no_conflicts": 4, + "override": 5, +}) class Context: @@ -608,13 +606,11 @@ def merge_attrs(variable_attrs, combine_attrs, context=None): result = {} dropped_keys = set() for attrs in variable_attrs: - result.update( - { - key: value - for key, value in attrs.items() - if key not in result and key not in dropped_keys - } - ) + result.update({ + key: value + for key, value in attrs.items() + if key not in result and key not in dropped_keys + }) result = { key: value for key, value in result.items() diff --git a/xarray/core/nputils.py b/xarray/core/nputils.py index 6970d37402f..31e074d4449 100644 --- a/xarray/core/nputils.py +++ b/xarray/core/nputils.py @@ -145,9 +145,9 @@ def _advanced_indexer_subspaces(key): return (), () non_slices = [k for k in key if not isinstance(k, slice)] - broadcasted_shape = np.broadcast_shapes( - *[item.shape if is_duck_array(item) else (0,) for item in non_slices] - ) + broadcasted_shape = np.broadcast_shapes(*[ + item.shape if is_duck_array(item) else (0,) for item in non_slices + ]) ndim = len(broadcasted_shape) mixed_positions = 
advanced_index_positions[0] + np.arange(ndim) vindex_positions = np.arange(ndim) diff --git a/xarray/core/parallel.py b/xarray/core/parallel.py index 8d295b521d7..8218c494cbf 100644 --- a/xarray/core/parallel.py +++ b/xarray/core/parallel.py @@ -349,9 +349,9 @@ def _wrapper( result = func(*converted_args, **kwargs) - merged_coordinates = merge( - [arg.coords for arg in args if isinstance(arg, (Dataset, DataArray))] - ).coords + merged_coordinates = merge([ + arg.coords for arg in args if isinstance(arg, (Dataset, DataArray)) + ]).coords # check all dims are present missing_dimensions = set(expected["shapes"]) - set(result.sizes) @@ -466,9 +466,10 @@ def _wrapper( tuple(k for k in preserved_coords.variables if k not in template_coords) ) - coordinates = merge( - (preserved_coords, template.coords.to_dataset()[new_coord_vars]) - ).coords + coordinates = merge(( + preserved_coords, + template.coords.to_dataset()[new_coord_vars], + )).coords output_chunks: Mapping[Hashable, tuple[int, ...]] = { dim: input_chunks[dim] for dim in template.dims if dim in input_chunks } diff --git a/xarray/core/rolling.py b/xarray/core/rolling.py index 6cf49fc995b..ecaa2179b42 100644 --- a/xarray/core/rolling.py +++ b/xarray/core/rolling.py @@ -886,9 +886,9 @@ def construct( dataset[key].attrs = {} # Need to stride coords as well. TODO: is there a better way? - coords = self.obj.isel( - {d: slice(None, None, s) for d, s in zip(self.dim, strides)} - ).coords + coords = self.obj.isel({ + d: slice(None, None, s) for d, s in zip(self.dim, strides) + }).coords attrs = self.obj.attrs if keep_attrs else {} diff --git a/xarray/core/types.py b/xarray/core/types.py index 8c3164c52fa..410cf3de00b 100644 --- a/xarray/core/types.py +++ b/xarray/core/types.py @@ -102,16 +102,13 @@ class Alignable(Protocol): """ @property - def dims(self) -> Frozen[Hashable, int] | tuple[Hashable, ...]: - ... + def dims(self) -> Frozen[Hashable, int] | tuple[Hashable, ...]: ... @property - def sizes(self) -> Mapping[Hashable, int]: - ... + def sizes(self) -> Mapping[Hashable, int]: ... @property - def xindexes(self) -> Indexes[Index]: - ... + def xindexes(self) -> Indexes[Index]: ... def _reindex_callback( self, @@ -122,27 +119,22 @@ def _reindex_callback( fill_value: Any, exclude_dims: frozenset[Hashable], exclude_vars: frozenset[Hashable], - ) -> Self: - ... + ) -> Self: ... def _overwrite_indexes( self, indexes: Mapping[Any, Index], variables: Mapping[Any, Variable] | None = None, - ) -> Self: - ... + ) -> Self: ... - def __len__(self) -> int: - ... + def __len__(self) -> int: ... - def __iter__(self) -> Iterator[Hashable]: - ... + def __iter__(self) -> Iterator[Hashable]: ... def copy( self, deep: bool = False, - ) -> Self: - ... + ) -> Self: ... T_Alignable = TypeVar("T_Alignable", bound="Alignable") diff --git a/xarray/core/utils.py b/xarray/core/utils.py index 0e6a352c0ac..f44f0cd0061 100644 --- a/xarray/core/utils.py +++ b/xarray/core/utils.py @@ -470,12 +470,10 @@ def __getitem__(self, key: K) -> V: return super().__getitem__(key) @overload - def get(self, key: K, /) -> V | None: - ... + def get(self, key: K, /) -> V | None: ... @overload - def get(self, key: K, /, default: V | T) -> V | T: - ... + def get(self, key: K, /, default: V | T) -> V | T: ... def get(self, key: K, default: T | None = None) -> V | T | None: self._warn() @@ -859,8 +857,7 @@ def parse_dims( *, check_exists: bool = True, replace_none: Literal[True] = True, -) -> tuple[Hashable, ...]: - ... +) -> tuple[Hashable, ...]: ... 
@overload @@ -870,8 +867,7 @@ def parse_dims( *, check_exists: bool = True, replace_none: Literal[False], -) -> tuple[Hashable, ...] | None | ellipsis: - ... +) -> tuple[Hashable, ...] | None | ellipsis: ... def parse_dims( @@ -922,8 +918,7 @@ def parse_ordered_dims( *, check_exists: bool = True, replace_none: Literal[True] = True, -) -> tuple[Hashable, ...]: - ... +) -> tuple[Hashable, ...]: ... @overload @@ -933,8 +928,7 @@ def parse_ordered_dims( *, check_exists: bool = True, replace_none: Literal[False], -) -> tuple[Hashable, ...] | None | ellipsis: - ... +) -> tuple[Hashable, ...] | None | ellipsis: ... def parse_ordered_dims( @@ -1016,12 +1010,10 @@ def __init__(self, accessor: type[_Accessor]) -> None: self._accessor = accessor @overload - def __get__(self, obj: None, cls) -> type[_Accessor]: - ... + def __get__(self, obj: None, cls) -> type[_Accessor]: ... @overload - def __get__(self, obj: object, cls) -> _Accessor: - ... + def __get__(self, obj: object, cls) -> _Accessor: ... def __get__(self, obj: None | object, cls) -> type[_Accessor] | _Accessor: if obj is None: @@ -1056,12 +1048,10 @@ def contains_only_chunked_or_numpy(obj) -> bool: if isinstance(obj, DataArray): obj = obj._to_temp_dataset() - return all( - [ - isinstance(var.data, np.ndarray) or is_chunked_array(var.data) - for var in obj.variables.values() - ] - ) + return all([ + isinstance(var.data, np.ndarray) or is_chunked_array(var.data) + for var in obj.variables.values() + ]) def find_stack_level(test_mode=False) -> int: diff --git a/xarray/namedarray/_typing.py b/xarray/namedarray/_typing.py index 96bb813bc69..b715973814f 100644 --- a/xarray/namedarray/_typing.py +++ b/xarray/namedarray/_typing.py @@ -56,8 +56,7 @@ class Default(Enum): @runtime_checkable class _SupportsDType(Protocol[_DType_co]): @property - def dtype(self) -> _DType_co: - ... + def dtype(self) -> _DType_co: ... _DTypeLike = Union[ @@ -102,14 +101,12 @@ def dtype(self) -> _DType_co: class _SupportsReal(Protocol[_T_co]): @property - def real(self) -> _T_co: - ... + def real(self) -> _T_co: ... class _SupportsImag(Protocol[_T_co]): @property - def imag(self) -> _T_co: - ... + def imag(self) -> _T_co: ... @runtime_checkable @@ -121,12 +118,10 @@ class _array(Protocol[_ShapeType_co, _DType_co]): """ @property - def shape(self) -> _Shape: - ... + def shape(self) -> _Shape: ... @property - def dtype(self) -> _DType_co: - ... + def dtype(self) -> _DType_co: ... @runtime_checkable @@ -142,12 +137,10 @@ class _arrayfunction( @overload def __getitem__( self, key: _arrayfunction[Any, Any] | tuple[_arrayfunction[Any, Any], ...], / - ) -> _arrayfunction[Any, _DType_co]: - ... + ) -> _arrayfunction[Any, _DType_co]: ... @overload - def __getitem__(self, key: _IndexKeyLike, /) -> Any: - ... + def __getitem__(self, key: _IndexKeyLike, /) -> Any: ... def __getitem__( self, @@ -157,21 +150,17 @@ def __getitem__( | tuple[_arrayfunction[Any, Any], ...] ), /, - ) -> _arrayfunction[Any, _DType_co] | Any: - ... + ) -> _arrayfunction[Any, _DType_co] | Any: ... @overload - def __array__(self, dtype: None = ..., /) -> np.ndarray[Any, _DType_co]: - ... + def __array__(self, dtype: None = ..., /) -> np.ndarray[Any, _DType_co]: ... @overload - def __array__(self, dtype: _DType, /) -> np.ndarray[Any, _DType]: - ... + def __array__(self, dtype: _DType, /) -> np.ndarray[Any, _DType]: ... def __array__( self, dtype: _DType | None = ..., / - ) -> np.ndarray[Any, _DType] | np.ndarray[Any, _DType_co]: - ... + ) -> np.ndarray[Any, _DType] | np.ndarray[Any, _DType_co]: ... 
# TODO: Should return the same subclass but with a new dtype generic. # https://github.com/python/typing/issues/548 @@ -181,8 +170,7 @@ def __array_ufunc__( method: Any, *inputs: Any, **kwargs: Any, - ) -> Any: - ... + ) -> Any: ... # TODO: Should return the same subclass but with a new dtype generic. # https://github.com/python/typing/issues/548 @@ -192,16 +180,13 @@ def __array_function__( types: Iterable[type], args: Iterable[Any], kwargs: Mapping[str, Any], - ) -> Any: - ... + ) -> Any: ... @property - def imag(self) -> _arrayfunction[_ShapeType_co, Any]: - ... + def imag(self) -> _arrayfunction[_ShapeType_co, Any]: ... @property - def real(self) -> _arrayfunction[_ShapeType_co, Any]: - ... + def real(self) -> _arrayfunction[_ShapeType_co, Any]: ... @runtime_checkable @@ -218,11 +203,9 @@ def __getitem__( _IndexKeyLike | Any ), # TODO: Any should be _arrayapi[Any, _dtype[np.integer]] /, - ) -> _arrayapi[Any, Any]: - ... + ) -> _arrayapi[Any, Any]: ... - def __array_namespace__(self) -> ModuleType: - ... + def __array_namespace__(self) -> ModuleType: ... # NamedArray can most likely use both __array_function__ and __array_namespace__: @@ -247,8 +230,7 @@ class _chunkedarray( """ @property - def chunks(self) -> _Chunks: - ... + def chunks(self) -> _Chunks: ... @runtime_checkable @@ -262,8 +244,7 @@ class _chunkedarrayfunction( """ @property - def chunks(self) -> _Chunks: - ... + def chunks(self) -> _Chunks: ... @runtime_checkable @@ -277,8 +258,7 @@ class _chunkedarrayapi( """ @property - def chunks(self) -> _Chunks: - ... + def chunks(self) -> _Chunks: ... # NamedArray can most likely use both __array_function__ and __array_namespace__: @@ -299,8 +279,7 @@ class _sparsearray( Corresponds to np.ndarray. """ - def todense(self) -> np.ndarray[Any, _DType_co]: - ... + def todense(self) -> np.ndarray[Any, _DType_co]: ... @runtime_checkable @@ -313,8 +292,7 @@ class _sparsearrayfunction( Corresponds to np.ndarray. """ - def todense(self) -> np.ndarray[Any, _DType_co]: - ... + def todense(self) -> np.ndarray[Any, _DType_co]: ... @runtime_checkable @@ -327,8 +305,7 @@ class _sparsearrayapi( Corresponds to np.ndarray. """ - def todense(self) -> np.ndarray[Any, _DType_co]: - ... + def todense(self) -> np.ndarray[Any, _DType_co]: ... # NamedArray can most likely use both __array_function__ and __array_namespace__: diff --git a/xarray/namedarray/core.py b/xarray/namedarray/core.py index e5e5fbb760c..29722690437 100644 --- a/xarray/namedarray/core.py +++ b/xarray/namedarray/core.py @@ -102,8 +102,7 @@ def _new( dims: _DimsLike | Default = ..., data: duckarray[_ShapeType, _DType] = ..., attrs: _AttrsLike | Default = ..., -) -> NamedArray[_ShapeType, _DType]: - ... +) -> NamedArray[_ShapeType, _DType]: ... @overload @@ -112,8 +111,7 @@ def _new( dims: _DimsLike | Default = ..., data: Default = ..., attrs: _AttrsLike | Default = ..., -) -> NamedArray[_ShapeType_co, _DType_co]: - ... +) -> NamedArray[_ShapeType_co, _DType_co]: ... def _new( @@ -161,8 +159,7 @@ def from_array( dims: _DimsLike, data: duckarray[_ShapeType, _DType], attrs: _AttrsLike = ..., -) -> NamedArray[_ShapeType, _DType]: - ... +) -> NamedArray[_ShapeType, _DType]: ... @overload @@ -170,8 +167,7 @@ def from_array( dims: _DimsLike, data: ArrayLike, attrs: _AttrsLike = ..., -) -> NamedArray[Any, Any]: - ... +) -> NamedArray[Any, Any]: ... def from_array( @@ -283,8 +279,7 @@ def _new( dims: _DimsLike | Default = ..., data: duckarray[_ShapeType, _DType] = ..., attrs: _AttrsLike | Default = ..., - ) -> NamedArray[_ShapeType, _DType]: - ... 
+ ) -> NamedArray[_ShapeType, _DType]: ... @overload def _new( @@ -292,8 +287,7 @@ def _new( dims: _DimsLike | Default = ..., data: Default = ..., attrs: _AttrsLike | Default = ..., - ) -> NamedArray[_ShapeType_co, _DType_co]: - ... + ) -> NamedArray[_ShapeType_co, _DType_co]: ... def _new( self, @@ -658,12 +652,10 @@ def _dask_finalize( return type(self)(self._dims, data, attrs=self._attrs) @overload - def get_axis_num(self, dim: Iterable[Hashable]) -> tuple[int, ...]: - ... + def get_axis_num(self, dim: Iterable[Hashable]) -> tuple[int, ...]: ... @overload - def get_axis_num(self, dim: Hashable) -> int: - ... + def get_axis_num(self, dim: Hashable) -> int: ... def get_axis_num(self, dim: Hashable | Iterable[Hashable]) -> int | tuple[int, ...]: """Return axis number(s) corresponding to dimension(s) in this array. diff --git a/xarray/plot/accessor.py b/xarray/plot/accessor.py index 203bae2691f..9db4ae4e3f7 100644 --- a/xarray/plot/accessor.py +++ b/xarray/plot/accessor.py @@ -77,8 +77,7 @@ def line( # type: ignore[misc,unused-ignore] # None is hashable :( add_legend: bool = True, _labels: bool = True, **kwargs: Any, - ) -> list[Line3D]: - ... + ) -> list[Line3D]: ... @overload def line( @@ -104,8 +103,7 @@ def line( add_legend: bool = True, _labels: bool = True, **kwargs: Any, - ) -> FacetGrid[DataArray]: - ... + ) -> FacetGrid[DataArray]: ... @overload def line( @@ -131,8 +129,7 @@ def line( add_legend: bool = True, _labels: bool = True, **kwargs: Any, - ) -> FacetGrid[DataArray]: - ... + ) -> FacetGrid[DataArray]: ... @functools.wraps(dataarray_plot.line, assigned=("__doc__",)) def line(self, *args, **kwargs) -> list[Line3D] | FacetGrid[DataArray]: @@ -148,8 +145,7 @@ def step( # type: ignore[misc,unused-ignore] # None is hashable :( row: None = None, # no wrap -> primitive col: None = None, # no wrap -> primitive **kwargs: Any, - ) -> list[Line3D]: - ... + ) -> list[Line3D]: ... @overload def step( @@ -161,8 +157,7 @@ def step( row: Hashable, # wrap -> FacetGrid col: Hashable | None = None, **kwargs: Any, - ) -> FacetGrid[DataArray]: - ... + ) -> FacetGrid[DataArray]: ... @overload def step( @@ -174,8 +169,7 @@ def step( row: Hashable | None = None, col: Hashable, # wrap -> FacetGrid **kwargs: Any, - ) -> FacetGrid[DataArray]: - ... + ) -> FacetGrid[DataArray]: ... @functools.wraps(dataarray_plot.step, assigned=("__doc__",)) def step(self, *args, **kwargs) -> list[Line3D] | FacetGrid[DataArray]: @@ -219,8 +213,7 @@ def scatter( # type: ignore[misc,unused-ignore] # None is hashable :( extend=None, levels=None, **kwargs, - ) -> PathCollection: - ... + ) -> PathCollection: ... @overload def scatter( @@ -260,8 +253,7 @@ def scatter( extend=None, levels=None, **kwargs, - ) -> FacetGrid[DataArray]: - ... + ) -> FacetGrid[DataArray]: ... @overload def scatter( @@ -301,8 +293,7 @@ def scatter( extend=None, levels=None, **kwargs, - ) -> FacetGrid[DataArray]: - ... + ) -> FacetGrid[DataArray]: ... @functools.wraps(dataarray_plot.scatter, assigned=("__doc__",)) def scatter(self, *args, **kwargs) -> PathCollection | FacetGrid[DataArray]: @@ -345,8 +336,7 @@ def imshow( # type: ignore[misc,unused-ignore] # None is hashable :( ylim: tuple[float, float] | None = None, norm: Normalize | None = None, **kwargs: Any, - ) -> AxesImage: - ... + ) -> AxesImage: ... @overload def imshow( @@ -385,8 +375,7 @@ def imshow( ylim: tuple[float, float] | None = None, norm: Normalize | None = None, **kwargs: Any, - ) -> FacetGrid[DataArray]: - ... + ) -> FacetGrid[DataArray]: ... 
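(Note on the pattern above: nearly every hunk in the typing, namedarray, and plotting modules is the formatter folding an ellipsis-only body onto the signature line. A minimal sketch of the transformation, using a hypothetical method name rather than one from the patch:

    # before: the `...` placeholder body occupies its own line
    def chunks(self) -> tuple[int, ...]:
        ...

    # after: the placeholder is joined to the signature
    def chunks(self) -> tuple[int, ...]: ...

The two spellings are identical at runtime: `...` is the built-in Ellipsis constant used as a no-op expression statement, so only the layout changes.)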
@overload def imshow( @@ -425,8 +414,7 @@ def imshow( ylim: tuple[float, float] | None = None, norm: Normalize | None = None, **kwargs: Any, - ) -> FacetGrid[DataArray]: - ... + ) -> FacetGrid[DataArray]: ... @functools.wraps(dataarray_plot.imshow, assigned=("__doc__",)) def imshow(self, *args, **kwargs) -> AxesImage | FacetGrid[DataArray]: @@ -469,8 +457,7 @@ def contour( # type: ignore[misc,unused-ignore] # None is hashable :( ylim: tuple[float, float] | None = None, norm: Normalize | None = None, **kwargs: Any, - ) -> QuadContourSet: - ... + ) -> QuadContourSet: ... @overload def contour( @@ -509,8 +496,7 @@ def contour( ylim: tuple[float, float] | None = None, norm: Normalize | None = None, **kwargs: Any, - ) -> FacetGrid[DataArray]: - ... + ) -> FacetGrid[DataArray]: ... @overload def contour( @@ -549,8 +535,7 @@ def contour( ylim: tuple[float, float] | None = None, norm: Normalize | None = None, **kwargs: Any, - ) -> FacetGrid[DataArray]: - ... + ) -> FacetGrid[DataArray]: ... @functools.wraps(dataarray_plot.contour, assigned=("__doc__",)) def contour(self, *args, **kwargs) -> QuadContourSet | FacetGrid[DataArray]: @@ -593,8 +578,7 @@ def contourf( # type: ignore[misc,unused-ignore] # None is hashable :( ylim: tuple[float, float] | None = None, norm: Normalize | None = None, **kwargs: Any, - ) -> QuadContourSet: - ... + ) -> QuadContourSet: ... @overload def contourf( @@ -633,8 +617,7 @@ def contourf( ylim: tuple[float, float] | None = None, norm: Normalize | None = None, **kwargs: Any, - ) -> FacetGrid[DataArray]: - ... + ) -> FacetGrid[DataArray]: ... @overload def contourf( @@ -673,8 +656,7 @@ def contourf( ylim: tuple[float, float] | None = None, norm: Normalize | None = None, **kwargs: Any, - ) -> FacetGrid: - ... + ) -> FacetGrid: ... @functools.wraps(dataarray_plot.contourf, assigned=("__doc__",)) def contourf(self, *args, **kwargs) -> QuadContourSet | FacetGrid[DataArray]: @@ -717,8 +699,7 @@ def pcolormesh( # type: ignore[misc,unused-ignore] # None is hashable :( ylim: tuple[float, float] | None = None, norm: Normalize | None = None, **kwargs: Any, - ) -> QuadMesh: - ... + ) -> QuadMesh: ... @overload def pcolormesh( @@ -757,8 +738,7 @@ def pcolormesh( ylim: tuple[float, float] | None = None, norm: Normalize | None = None, **kwargs: Any, - ) -> FacetGrid[DataArray]: - ... + ) -> FacetGrid[DataArray]: ... @overload def pcolormesh( @@ -797,8 +777,7 @@ def pcolormesh( ylim: tuple[float, float] | None = None, norm: Normalize | None = None, **kwargs: Any, - ) -> FacetGrid[DataArray]: - ... + ) -> FacetGrid[DataArray]: ... @functools.wraps(dataarray_plot.pcolormesh, assigned=("__doc__",)) def pcolormesh(self, *args, **kwargs) -> QuadMesh | FacetGrid[DataArray]: @@ -841,8 +820,7 @@ def surface( ylim: tuple[float, float] | None = None, norm: Normalize | None = None, **kwargs: Any, - ) -> Poly3DCollection: - ... + ) -> Poly3DCollection: ... @overload def surface( @@ -881,8 +859,7 @@ def surface( ylim: tuple[float, float] | None = None, norm: Normalize | None = None, **kwargs: Any, - ) -> FacetGrid: - ... + ) -> FacetGrid: ... @overload def surface( @@ -921,8 +898,7 @@ def surface( ylim: tuple[float, float] | None = None, norm: Normalize | None = None, **kwargs: Any, - ) -> FacetGrid: - ... + ) -> FacetGrid: ... 
@functools.wraps(dataarray_plot.surface, assigned=("__doc__",)) def surface(self, *args, **kwargs) -> Poly3DCollection: @@ -985,8 +961,7 @@ def scatter( # type: ignore[misc,unused-ignore] # None is hashable :( extend=None, levels=None, **kwargs: Any, - ) -> PathCollection: - ... + ) -> PathCollection: ... @overload def scatter( @@ -1026,8 +1001,7 @@ def scatter( extend=None, levels=None, **kwargs: Any, - ) -> FacetGrid[Dataset]: - ... + ) -> FacetGrid[Dataset]: ... @overload def scatter( @@ -1067,8 +1041,7 @@ def scatter( extend=None, levels=None, **kwargs: Any, - ) -> FacetGrid[Dataset]: - ... + ) -> FacetGrid[Dataset]: ... @functools.wraps(dataset_plot.scatter, assigned=("__doc__",)) def scatter(self, *args, **kwargs) -> PathCollection | FacetGrid[Dataset]: @@ -1108,8 +1081,7 @@ def quiver( # type: ignore[misc,unused-ignore] # None is hashable :( extend=None, cmap=None, **kwargs: Any, - ) -> Quiver: - ... + ) -> Quiver: ... @overload def quiver( @@ -1145,8 +1117,7 @@ def quiver( extend=None, cmap=None, **kwargs: Any, - ) -> FacetGrid[Dataset]: - ... + ) -> FacetGrid[Dataset]: ... @overload def quiver( @@ -1182,8 +1153,7 @@ def quiver( extend=None, cmap=None, **kwargs: Any, - ) -> FacetGrid[Dataset]: - ... + ) -> FacetGrid[Dataset]: ... @functools.wraps(dataset_plot.quiver, assigned=("__doc__",)) def quiver(self, *args, **kwargs) -> Quiver | FacetGrid[Dataset]: @@ -1223,8 +1193,7 @@ def streamplot( # type: ignore[misc,unused-ignore] # None is hashable :( extend=None, cmap=None, **kwargs: Any, - ) -> LineCollection: - ... + ) -> LineCollection: ... @overload def streamplot( @@ -1260,8 +1229,7 @@ def streamplot( extend=None, cmap=None, **kwargs: Any, - ) -> FacetGrid[Dataset]: - ... + ) -> FacetGrid[Dataset]: ... @overload def streamplot( @@ -1297,8 +1265,7 @@ def streamplot( extend=None, cmap=None, **kwargs: Any, - ) -> FacetGrid[Dataset]: - ... + ) -> FacetGrid[Dataset]: ... @functools.wraps(dataset_plot.streamplot, assigned=("__doc__",)) def streamplot(self, *args, **kwargs) -> LineCollection | FacetGrid[Dataset]: diff --git a/xarray/plot/dataarray_plot.py b/xarray/plot/dataarray_plot.py index a82ed63c68f..67a61e2a105 100644 --- a/xarray/plot/dataarray_plot.py +++ b/xarray/plot/dataarray_plot.py @@ -207,9 +207,9 @@ def _prepare_plot1d_data( # Broadcast together all the chosen variables: plts = dict(y=darray) - plts.update( - {k: darray.coords[v] for k, v in coords_to_plot.items() if v is not None} - ) + plts.update({ + k: darray.coords[v] for k, v in coords_to_plot.items() if v is not None + }) plts = dict(zip(plts.keys(), broadcast(*(plts.values())))) return plts @@ -333,8 +333,7 @@ def line( # type: ignore[misc,unused-ignore] # None is hashable :( add_legend: bool = True, _labels: bool = True, **kwargs: Any, -) -> list[Line3D]: - ... +) -> list[Line3D]: ... @overload @@ -361,8 +360,7 @@ def line( add_legend: bool = True, _labels: bool = True, **kwargs: Any, -) -> FacetGrid[T_DataArray]: - ... +) -> FacetGrid[T_DataArray]: ... @overload @@ -389,8 +387,7 @@ def line( add_legend: bool = True, _labels: bool = True, **kwargs: Any, -) -> FacetGrid[T_DataArray]: - ... +) -> FacetGrid[T_DataArray]: ... # This function signature should not change so that it can use @@ -544,8 +541,7 @@ def step( # type: ignore[misc,unused-ignore] # None is hashable :( row: None = None, # no wrap -> primitive col: None = None, # no wrap -> primitive **kwargs: Any, -) -> list[Line3D]: - ... +) -> list[Line3D]: ... 
@overload @@ -558,8 +554,7 @@ def step( row: Hashable, # wrap -> FacetGrid col: Hashable | None = None, **kwargs: Any, -) -> FacetGrid[DataArray]: - ... +) -> FacetGrid[DataArray]: ... @overload @@ -572,8 +567,7 @@ def step( row: Hashable | None = None, col: Hashable, # wrap -> FacetGrid **kwargs: Any, -) -> FacetGrid[DataArray]: - ... +) -> FacetGrid[DataArray]: ... def step( @@ -1146,8 +1140,7 @@ def scatter( # type: ignore[misc,unused-ignore] # None is hashable :( extend: ExtendOptions = None, levels: ArrayLike | None = None, **kwargs, -) -> PathCollection: - ... +) -> PathCollection: ... @overload @@ -1188,8 +1181,7 @@ def scatter( extend: ExtendOptions = None, levels: ArrayLike | None = None, **kwargs, -) -> FacetGrid[T_DataArray]: - ... +) -> FacetGrid[T_DataArray]: ... @overload @@ -1230,8 +1222,7 @@ def scatter( extend: ExtendOptions = None, levels: ArrayLike | None = None, **kwargs, -) -> FacetGrid[T_DataArray]: - ... +) -> FacetGrid[T_DataArray]: ... @_plot1d @@ -1698,8 +1689,7 @@ def imshow( # type: ignore[misc,unused-ignore] # None is hashable :( ylim: ArrayLike | None = None, norm: Normalize | None = None, **kwargs: Any, -) -> AxesImage: - ... +) -> AxesImage: ... @overload @@ -1739,8 +1729,7 @@ def imshow( ylim: ArrayLike | None = None, norm: Normalize | None = None, **kwargs: Any, -) -> FacetGrid[T_DataArray]: - ... +) -> FacetGrid[T_DataArray]: ... @overload @@ -1780,8 +1769,7 @@ def imshow( ylim: ArrayLike | None = None, norm: Normalize | None = None, **kwargs: Any, -) -> FacetGrid[T_DataArray]: - ... +) -> FacetGrid[T_DataArray]: ... @_plot2d @@ -1917,8 +1905,7 @@ def contour( # type: ignore[misc,unused-ignore] # None is hashable :( ylim: ArrayLike | None = None, norm: Normalize | None = None, **kwargs: Any, -) -> QuadContourSet: - ... +) -> QuadContourSet: ... @overload @@ -1958,8 +1945,7 @@ def contour( ylim: ArrayLike | None = None, norm: Normalize | None = None, **kwargs: Any, -) -> FacetGrid[T_DataArray]: - ... +) -> FacetGrid[T_DataArray]: ... @overload @@ -1999,8 +1985,7 @@ def contour( ylim: ArrayLike | None = None, norm: Normalize | None = None, **kwargs: Any, -) -> FacetGrid[T_DataArray]: - ... +) -> FacetGrid[T_DataArray]: ... @_plot2d @@ -2053,8 +2038,7 @@ def contourf( # type: ignore[misc,unused-ignore] # None is hashable :( ylim: ArrayLike | None = None, norm: Normalize | None = None, **kwargs: Any, -) -> QuadContourSet: - ... +) -> QuadContourSet: ... @overload @@ -2094,8 +2078,7 @@ def contourf( ylim: ArrayLike | None = None, norm: Normalize | None = None, **kwargs: Any, -) -> FacetGrid[T_DataArray]: - ... +) -> FacetGrid[T_DataArray]: ... @overload @@ -2135,8 +2118,7 @@ def contourf( ylim: ArrayLike | None = None, norm: Normalize | None = None, **kwargs: Any, -) -> FacetGrid[T_DataArray]: - ... +) -> FacetGrid[T_DataArray]: ... @_plot2d @@ -2189,8 +2171,7 @@ def pcolormesh( # type: ignore[misc,unused-ignore] # None is hashable :( ylim: ArrayLike | None = None, norm: Normalize | None = None, **kwargs: Any, -) -> QuadMesh: - ... +) -> QuadMesh: ... @overload @@ -2230,8 +2211,7 @@ def pcolormesh( ylim: ArrayLike | None = None, norm: Normalize | None = None, **kwargs: Any, -) -> FacetGrid[T_DataArray]: - ... +) -> FacetGrid[T_DataArray]: ... @overload @@ -2271,8 +2251,7 @@ def pcolormesh( ylim: ArrayLike | None = None, norm: Normalize | None = None, **kwargs: Any, -) -> FacetGrid[T_DataArray]: - ... +) -> FacetGrid[T_DataArray]: ... 
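(These ellipsis bodies are safe to fold because they belong to typing.overload stubs: the decorated signatures exist only for the type checker, and the single undecorated definition that follows is what actually runs. A self-contained sketch of the idiom, with a hypothetical function rather than one from xarray:

    from typing import overload

    @overload
    def double(x: int) -> int: ...
    @overload
    def double(x: str) -> str: ...

    def double(x: int | str) -> int | str:
        # The sole runtime implementation; the stubs above carry
        # only type information and are never executed.
        return x + x

Since a stub body can never execute, collapsing it onto one line loses nothing.)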
@_plot2d @@ -2376,8 +2355,7 @@ def surface( ylim: ArrayLike | None = None, norm: Normalize | None = None, **kwargs: Any, -) -> Poly3DCollection: - ... +) -> Poly3DCollection: ... @overload @@ -2417,8 +2395,7 @@ def surface( ylim: ArrayLike | None = None, norm: Normalize | None = None, **kwargs: Any, -) -> FacetGrid[T_DataArray]: - ... +) -> FacetGrid[T_DataArray]: ... @overload @@ -2458,8 +2435,7 @@ def surface( ylim: ArrayLike | None = None, norm: Normalize | None = None, **kwargs: Any, -) -> FacetGrid[T_DataArray]: - ... +) -> FacetGrid[T_DataArray]: ... @_plot2d diff --git a/xarray/plot/dataset_plot.py b/xarray/plot/dataset_plot.py index a3ca201eec4..edc2bf43629 100644 --- a/xarray/plot/dataset_plot.py +++ b/xarray/plot/dataset_plot.py @@ -354,8 +354,7 @@ def quiver( # type: ignore[misc,unused-ignore] # None is hashable :( extend: ExtendOptions = None, cmap: str | Colormap | None = None, **kwargs: Any, -) -> Quiver: - ... +) -> Quiver: ... @overload @@ -392,8 +391,7 @@ def quiver( extend: ExtendOptions = None, cmap: str | Colormap | None = None, **kwargs: Any, -) -> FacetGrid[Dataset]: - ... +) -> FacetGrid[Dataset]: ... @overload @@ -430,8 +428,7 @@ def quiver( extend: ExtendOptions = None, cmap: str | Colormap | None = None, **kwargs: Any, -) -> FacetGrid[Dataset]: - ... +) -> FacetGrid[Dataset]: ... @_dsplot @@ -508,8 +505,7 @@ def streamplot( # type: ignore[misc,unused-ignore] # None is hashable :( extend: ExtendOptions = None, cmap: str | Colormap | None = None, **kwargs: Any, -) -> LineCollection: - ... +) -> LineCollection: ... @overload @@ -546,8 +542,7 @@ def streamplot( extend: ExtendOptions = None, cmap: str | Colormap | None = None, **kwargs: Any, -) -> FacetGrid[Dataset]: - ... +) -> FacetGrid[Dataset]: ... @overload @@ -584,8 +579,7 @@ def streamplot( extend: ExtendOptions = None, cmap: str | Colormap | None = None, **kwargs: Any, -) -> FacetGrid[Dataset]: - ... +) -> FacetGrid[Dataset]: ... @_dsplot @@ -786,8 +780,7 @@ def scatter( # type: ignore[misc,unused-ignore] # None is hashable :( extend: ExtendOptions = None, levels: ArrayLike | None = None, **kwargs: Any, -) -> PathCollection: - ... +) -> PathCollection: ... @overload @@ -828,8 +821,7 @@ def scatter( extend: ExtendOptions = None, levels: ArrayLike | None = None, **kwargs: Any, -) -> FacetGrid[DataArray]: - ... +) -> FacetGrid[DataArray]: ... @overload @@ -870,8 +862,7 @@ def scatter( extend: ExtendOptions = None, levels: ArrayLike | None = None, **kwargs: Any, -) -> FacetGrid[DataArray]: - ... +) -> FacetGrid[DataArray]: ... @_update_doc_to_dataset(dataarray_plot.scatter) diff --git a/xarray/plot/utils.py b/xarray/plot/utils.py index 6f1a5a68cfe..65e600ff455 100644 --- a/xarray/plot/utils.py +++ b/xarray/plot/utils.py @@ -1291,16 +1291,14 @@ def _infer_meta_data(ds, x, y, hue, hue_style, add_guide, funcname): def _parse_size( data: None, norm: tuple[float | None, float | None, bool] | Normalize | None, -) -> None: - ... +) -> None: ... @overload def _parse_size( data: DataArray, norm: tuple[float | None, float | None, bool] | Normalize | None, -) -> pd.Series: - ... +) -> pd.Series: ... # copied from seaborn @@ -1445,12 +1443,10 @@ def data_is_numeric(self) -> bool: return self._data_is_numeric @overload - def _calc_widths(self, y: np.ndarray) -> np.ndarray: - ... + def _calc_widths(self, y: np.ndarray) -> np.ndarray: ... @overload - def _calc_widths(self, y: DataArray) -> DataArray: - ... + def _calc_widths(self, y: DataArray) -> DataArray: ... 
def _calc_widths(self, y: np.ndarray | DataArray) -> np.ndarray | DataArray: """ @@ -1472,12 +1468,10 @@ def _calc_widths(self, y: np.ndarray | DataArray) -> np.ndarray | DataArray: return widths @overload - def _indexes_centered(self, x: np.ndarray) -> np.ndarray: - ... + def _indexes_centered(self, x: np.ndarray) -> np.ndarray: ... @overload - def _indexes_centered(self, x: DataArray) -> DataArray: - ... + def _indexes_centered(self, x: DataArray) -> DataArray: ... def _indexes_centered(self, x: np.ndarray | DataArray) -> np.ndarray | DataArray: """ diff --git a/xarray/testing/strategies.py b/xarray/testing/strategies.py index d08cbc0b584..c5a7afdf54e 100644 --- a/xarray/testing/strategies.py +++ b/xarray/testing/strategies.py @@ -36,8 +36,7 @@ def __call__( *, shape: "_ShapeLike", dtype: "_DTypeLikeNested", - ) -> st.SearchStrategy[T_DuckArray]: - ... + ) -> st.SearchStrategy[T_DuckArray]: ... def supported_dtypes() -> st.SearchStrategy[np.dtype]: @@ -368,8 +367,7 @@ def unique_subset_of( *, min_size: int = 0, max_size: Union[int, None] = None, -) -> st.SearchStrategy[Sequence[Hashable]]: - ... +) -> st.SearchStrategy[Sequence[Hashable]]: ... @overload @@ -378,8 +376,7 @@ def unique_subset_of( *, min_size: int = 0, max_size: Union[int, None] = None, -) -> st.SearchStrategy[Mapping[Hashable, Any]]: - ... +) -> st.SearchStrategy[Mapping[Hashable, Any]]: ... @st.composite diff --git a/xarray/tests/test_accessor_dt.py b/xarray/tests/test_accessor_dt.py index 686bce943fa..7ec49ab074f 100644 --- a/xarray/tests/test_accessor_dt.py +++ b/xarray/tests/test_accessor_dt.py @@ -253,23 +253,21 @@ def test_seasons(self) -> None: ) dates = dates.append(pd.Index([np.datetime64("NaT")])) dates = xr.DataArray(dates) - seasons = xr.DataArray( - [ - "DJF", - "DJF", - "MAM", - "MAM", - "MAM", - "JJA", - "JJA", - "JJA", - "SON", - "SON", - "SON", - "DJF", - "nan", - ] - ) + seasons = xr.DataArray([ + "DJF", + "DJF", + "MAM", + "MAM", + "MAM", + "JJA", + "JJA", + "JJA", + "SON", + "SON", + "SON", + "DJF", + "nan", + ]) assert_array_equal(seasons.values, dates.dt.season.values) @@ -570,34 +568,30 @@ def test_seasons(cftime_date_type) -> None: dates = xr.DataArray( np.array([cftime_date_type(2000, month, 15) for month in range(1, 13)]) ) - seasons = xr.DataArray( - [ - "DJF", - "DJF", - "MAM", - "MAM", - "MAM", - "JJA", - "JJA", - "JJA", - "SON", - "SON", - "SON", - "DJF", - ] - ) + seasons = xr.DataArray([ + "DJF", + "DJF", + "MAM", + "MAM", + "MAM", + "JJA", + "JJA", + "JJA", + "SON", + "SON", + "SON", + "DJF", + ]) assert_array_equal(seasons.values, dates.dt.season.values) @pytest.fixture def cftime_rounding_dataarray(cftime_date_type): - return xr.DataArray( - [ - [cftime_date_type(1, 1, 1, 1), cftime_date_type(1, 1, 1, 15)], - [cftime_date_type(1, 1, 1, 23), cftime_date_type(1, 1, 2, 1)], - ] - ) + return xr.DataArray([ + [cftime_date_type(1, 1, 1, 1), cftime_date_type(1, 1, 1, 15)], + [cftime_date_type(1, 1, 1, 23), cftime_date_type(1, 1, 2, 1)], + ]) @requires_cftime diff --git a/xarray/tests/test_accessor_str.py b/xarray/tests/test_accessor_str.py index e0c9619b4e7..55677de4dfa 100644 --- a/xarray/tests/test_accessor_str.py +++ b/xarray/tests/test_accessor_str.py @@ -356,12 +356,28 @@ def test_replace(dtype) -> None: assert result.dtype == expected.dtype assert_equal(result, expected) - values = xr.DataArray( - ["A", "B", "C", "Aaba", "Baca", "", "CABA", "dog", "cat"] - ).astype(dtype) - expected = xr.DataArray( - ["YYY", "B", "C", "YYYaba", "Baca", "", "CYYYBYYY", "dog", "cat"] - ).astype(dtype) + 
values = xr.DataArray([ + "A", + "B", + "C", + "Aaba", + "Baca", + "", + "CABA", + "dog", + "cat", + ]).astype(dtype) + expected = xr.DataArray([ + "YYY", + "B", + "C", + "YYYaba", + "Baca", + "", + "CYYYBYYY", + "dog", + "cat", + ]).astype(dtype) result = values.str.replace("A", "YYY") assert result.dtype == expected.dtype assert_equal(result, expected) @@ -370,16 +386,32 @@ def test_replace(dtype) -> None: assert_equal(result, expected) result = values.str.replace("A", "YYY", case=False) - expected = xr.DataArray( - ["YYY", "B", "C", "YYYYYYbYYY", "BYYYcYYY", "", "CYYYBYYY", "dog", "cYYYt"] - ).astype(dtype) + expected = xr.DataArray([ + "YYY", + "B", + "C", + "YYYYYYbYYY", + "BYYYcYYY", + "", + "CYYYBYYY", + "dog", + "cYYYt", + ]).astype(dtype) assert result.dtype == expected.dtype assert_equal(result, expected) result = values.str.replace("^.a|dog", "XX-XX ", case=False) - expected = xr.DataArray( - ["A", "B", "C", "XX-XX ba", "XX-XX ca", "", "XX-XX BA", "XX-XX ", "XX-XX t"] - ).astype(dtype) + expected = xr.DataArray([ + "A", + "B", + "C", + "XX-XX ba", + "XX-XX ca", + "", + "XX-XX BA", + "XX-XX ", + "XX-XX t", + ]).astype(dtype) assert result.dtype == expected.dtype assert_equal(result, expected) @@ -1729,9 +1761,18 @@ def test_empty_str_methods() -> None: def test_ismethods( dtype, func: Callable[[xr.DataArray], xr.DataArray], expected: list[bool] ) -> None: - values = xr.DataArray( - ["A", "b", "Xy", "4", "3A", "", "TT", "55", "-", " "] - ).astype(dtype) + values = xr.DataArray([ + "A", + "b", + "Xy", + "4", + "3A", + "", + "TT", + "55", + "-", + " ", + ]).astype(dtype) expected_da = xr.DataArray(expected) actual = func(values) @@ -2311,34 +2352,30 @@ def test_wrap() -> None: # two words greater than width, one word less than width, one word # equal to width, one word greater than width, multiple tokens with # trailing whitespace equal to width - values = xr.DataArray( - [ - "hello world", - "hello world!", - "hello world!!", - "abcdefabcde", - "abcdefabcdef", - "abcdefabcdefa", - "ab ab ab ab ", - "ab ab ab ab a", - "\t", - ] - ) + values = xr.DataArray([ + "hello world", + "hello world!", + "hello world!!", + "abcdefabcde", + "abcdefabcdef", + "abcdefabcdefa", + "ab ab ab ab ", + "ab ab ab ab a", + "\t", + ]) # expected values - expected = xr.DataArray( - [ - "hello world", - "hello world!", - "hello\nworld!!", - "abcdefabcde", - "abcdefabcdef", - "abcdefabcdef\na", - "ab ab ab ab", - "ab ab ab ab\na", - "", - ] - ) + expected = xr.DataArray([ + "hello world", + "hello world!", + "hello\nworld!!", + "abcdefabcde", + "abcdefabcdef", + "abcdefabcdef\na", + "ab ab ab ab", + "ab ab ab ab\na", + "", + ]) result = values.str.wrap(12, break_long_words=True) assert result.dtype == expected.dtype diff --git a/xarray/tests/test_backends.py b/xarray/tests/test_backends.py index 668d14b86c9..1176d1bfef0 100644 --- a/xarray/tests/test_backends.py +++ b/xarray/tests/test_backends.py @@ -155,9 +155,9 @@ def create_masked_and_scaled_data() -> Dataset: def create_encoded_masked_and_scaled_data() -> Dataset: attributes = {"_FillValue": -1, "add_offset": 10, "scale_factor": np.float32(0.1)} - return Dataset( - {"x": ("t", np.array([-1, -1, 0, 1, 2], dtype=np.int16), attributes)} - ) + return Dataset({ + "x": ("t", np.array([-1, -1, 0, 1, 2], dtype=np.int16), attributes) + }) def create_unsigned_masked_scaled_data() -> Dataset: @@ -480,18 +480,16 @@ def test_roundtrip_object_dtype(self) -> None: strings = np.array(["ab", "cdef", "g"], dtype=object) strings_nans = np.array(["ab", "cdef", np.nan], 
dtype=object) all_nans = np.array([np.nan, np.nan], dtype=object) - original = Dataset( - { - "floats": ("a", floats), - "floats_nans": ("a", floats_nans), - "bytes": ("b", bytes_), - "bytes_nans": ("b", bytes_nans), - "strings": ("b", strings), - "strings_nans": ("b", strings_nans), - "all_nans": ("c", all_nans), - "nan": ([], np.nan), - } - ) + original = Dataset({ + "floats": ("a", floats), + "floats_nans": ("a", floats_nans), + "bytes": ("b", bytes_), + "bytes_nans": ("b", bytes_nans), + "strings": ("b", strings), + "strings_nans": ("b", strings_nans), + "all_nans": ("c", all_nans), + "nan": ([], np.nan), + }) expected = original.copy(deep=True) with self.roundtrip(original) as actual: try: @@ -989,9 +987,13 @@ def test_coordinate_variables_after_dataset_roundtrip(self) -> None: assert_identical(actual, original) with self.roundtrip(original) as actual: - expected = original.reset_coords( - ["latitude_bnds", "longitude_bnds", "areas", "P0", "latlon"] - ) + expected = original.reset_coords([ + "latitude_bnds", + "longitude_bnds", + "areas", + "P0", + "latlon", + ]) # equal checks that coords and data_vars are equal which # should be enough # identical would require resetting a number of attributes @@ -1075,14 +1077,12 @@ def equals_latlon(obj): assert "coordinates" not in ds["lon"].encoding def test_roundtrip_endian(self) -> None: - ds = Dataset( - { - "x": np.arange(3, 10, dtype=">i2"), - "y": np.arange(3, 20, dtype=" None: def test_compression_encoding_legacy(self) -> None: data = create_test_data() - data["var2"].encoding.update( - { - "zlib": True, - "chunksizes": (5, 5), - "fletcher32": True, - "shuffle": True, - "original_shape": data.var2.shape, - } - ) + data["var2"].encoding.update({ + "zlib": True, + "chunksizes": (5, 5), + "fletcher32": True, + "shuffle": True, + "original_shape": data.var2.shape, + }) with self.roundtrip(data) as actual: for k, v in data["var2"].encoding.items(): assert v == actual["var2"].encoding[k] @@ -1595,9 +1593,11 @@ def chunked_roundtrip( t_chunksize, y_chunksize, x_chunksize = chunk_sizes image = xr.DataArray( - np.arange(t_size * x_size * y_size, dtype=np.int16).reshape( - (t_size, y_size, x_size) - ), + np.arange(t_size * x_size * y_size, dtype=np.int16).reshape(( + t_size, + y_size, + x_size, + )), dims=["t", "y", "x"], ) image.encoding = {"chunksizes": (t_chunksize, y_chunksize, x_chunksize)} @@ -1910,14 +1910,12 @@ def test_compression_encoding(self, compression: str | None) -> None: data = create_test_data(dim_sizes=(20, 80, 10)) encoding_params: dict[str, Any] = dict(compression=compression, blosc_shuffle=1) data["var2"].encoding.update(encoding_params) - data["var2"].encoding.update( - { - "chunksizes": (20, 40), - "original_shape": data.var2.shape, - "blosc_shuffle": 1, - "fletcher32": False, - } - ) + data["var2"].encoding.update({ + "chunksizes": (20, 40), + "original_shape": data.var2.shape, + "blosc_shuffle": 1, + "fletcher32": False, + }) with self.roundtrip(data) as actual: expected_encoding = data["var2"].encoding.copy() # compression does not appear in the retrieved encoding, that differs @@ -2880,9 +2878,9 @@ def test_encoding_chunksizes(self) -> None: @requires_dask def test_chunk_encoding_with_partial_dask_chunks(self) -> None: - original = xr.Dataset( - {"x": xr.DataArray(np.random.random(size=(6, 8)), dims=("a", "b"))} - ).chunk({"a": 3}) + original = xr.Dataset({ + "x": xr.DataArray(np.random.random(size=(6, 8)), dims=("a", "b")) + }).chunk({"a": 3}) with self.roundtrip( original, save_kwargs={"encoding": {"x": {"chunks": [3, 
2]}}} @@ -4314,12 +4312,10 @@ def test_open_mfdataset_raise_on_bad_combine_args(self) -> None: open_mfdataset([tmp1, tmp2], concat_dim="x") def test_encoding_mfdataset(self) -> None: - original = Dataset( - { - "foo": ("t", np.random.randn(10)), - "t": ("t", pd.date_range(start="2010-01-01", periods=10, freq="1D")), - } - ) + original = Dataset({ + "foo": ("t", np.random.randn(10)), + "t": ("t", pd.date_range(start="2010-01-01", periods=10, freq="1D")), + }) original.t.encoding["units"] = "days since 2010-01-01" with create_tmp_file() as tmp1: @@ -5264,13 +5260,11 @@ def test_load_single_value_h5netcdf(tmp_path: Path) -> None: backend. This was leading to a ValueError upon loading a single value from a file, see #4471. Test that loading causes no failure. """ - ds = xr.Dataset( - { - "test": xr.DataArray( - np.array([0]), dims=("x",), attrs={"scale_factor": 1, "add_offset": 0} - ) - } - ) + ds = xr.Dataset({ + "test": xr.DataArray( + np.array([0]), dims=("x",), attrs={"scale_factor": 1, "add_offset": 0} + ) + }) ds.to_netcdf(tmp_path / "test.nc") with xr.open_dataset(tmp_path / "test.nc", engine="h5netcdf") as ds2: ds2["test"][0].load() @@ -5286,14 +5280,12 @@ def test_open_dataset_chunking_zarr(chunks, tmp_path: Path) -> None: dask_arr = da.from_array( np.ones((500, 500), dtype="float64"), chunks=encoded_chunks ) - ds = xr.Dataset( - { - "test": xr.DataArray( - dask_arr, - dims=("x", "y"), - ) - } - ) + ds = xr.Dataset({ + "test": xr.DataArray( + dask_arr, + dims=("x", "y"), + ) + }) ds["test"].encoding["chunks"] = encoded_chunks ds.to_zarr(tmp_path / "test.zarr") @@ -5316,14 +5308,12 @@ def test_chunking_consintency(chunks, tmp_path: Path) -> None: dask_arr = da.from_array( np.ones((500, 500), dtype="float64"), chunks=encoded_chunks ) - ds = xr.Dataset( - { - "test": xr.DataArray( - dask_arr, - dims=("x", "y"), - ) - } - ) + ds = xr.Dataset({ + "test": xr.DataArray( + dask_arr, + dims=("x", "y"), + ) + }) ds["test"].encoding["chunks"] = encoded_chunks ds.to_zarr(tmp_path / "test.zarr") ds.to_netcdf(tmp_path / "test.nc") @@ -5517,15 +5507,13 @@ def test_zarr_region_auto_all(self, tmp_path): x = np.arange(0, 50, 10) y = np.arange(0, 20, 2) data = np.ones((5, 10)) - ds = xr.Dataset( - { - "test": xr.DataArray( - data, - dims=("x", "y"), - coords={"x": x, "y": y}, - ) - } - ) + ds = xr.Dataset({ + "test": xr.DataArray( + data, + dims=("x", "y"), + coords={"x": x, "y": y}, + ) + }) ds.to_zarr(tmp_path / "test.zarr") ds_region = 1 + ds.isel(x=slice(2, 4), y=slice(6, 8)) @@ -5541,15 +5529,13 @@ def test_zarr_region_auto_mixed(self, tmp_path): x = np.arange(0, 50, 10) y = np.arange(0, 20, 2) data = np.ones((5, 10)) - ds = xr.Dataset( - { - "test": xr.DataArray( - data, - dims=("x", "y"), - coords={"x": x, "y": y}, - ) - } - ) + ds = xr.Dataset({ + "test": xr.DataArray( + data, + dims=("x", "y"), + coords={"x": x, "y": y}, + ) + }) ds.to_zarr(tmp_path / "test.zarr") ds_region = 1 + ds.isel(x=slice(2, 4), y=slice(6, 8)) @@ -5567,15 +5553,13 @@ def test_zarr_region_auto_noncontiguous(self, tmp_path): x = np.arange(0, 50, 10) y = np.arange(0, 20, 2) data = np.ones((5, 10)) - ds = xr.Dataset( - { - "test": xr.DataArray( - data, - dims=("x", "y"), - coords={"x": x, "y": y}, - ) - } - ) + ds = xr.Dataset({ + "test": xr.DataArray( + data, + dims=("x", "y"), + coords={"x": x, "y": y}, + ) + }) ds.to_zarr(tmp_path / "test.zarr") ds_region = 1 + ds.isel(x=[0, 2, 3], y=[5, 6]) @@ -5586,29 +5570,25 @@ def test_zarr_region_auto_new_coord_vals(self, tmp_path): x = np.arange(0, 50, 10) y = np.arange(0, 20, 2) 
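(The test-suite hunks from here on are dominated by the second formatting change: when a call's sole argument is a dict or list literal, the literal's brackets now "hug" the call's parentheses instead of getting their own indentation level. A minimal before/after sketch with hypothetical data:

    # before: the dict literal adds an extra level of indentation
    ds = xr.Dataset(
        {
            "temperature": ("x", [11.2, 12.5, 13.1]),
            "pressure": ("x", [1012.0, 1013.5, 1011.8]),
        }
    )

    # after: the braces hug the parentheses, saving one level
    ds = xr.Dataset({
        "temperature": ("x", [11.2, 12.5, 13.1]),
        "pressure": ("x", [1012.0, 1013.5, 1011.8]),
    })

This corresponds to the feature known in Black 24.1's preview style as hug_parens_with_braces_and_square_brackets; it rearranges whitespace only and cannot change what the tests assert.)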
data = np.ones((5, 10)) - ds = xr.Dataset( - { - "test": xr.DataArray( - data, - dims=("x", "y"), - coords={"x": x, "y": y}, - ) - } - ) + ds = xr.Dataset({ + "test": xr.DataArray( + data, + dims=("x", "y"), + coords={"x": x, "y": y}, + ) + }) ds.to_zarr(tmp_path / "test.zarr") x = np.arange(5, 55, 10) y = np.arange(0, 20, 2) data = np.ones((5, 10)) - ds = xr.Dataset( - { - "test": xr.DataArray( - data, - dims=("x", "y"), - coords={"x": x, "y": y}, - ) - } - ) + ds = xr.Dataset({ + "test": xr.DataArray( + data, + dims=("x", "y"), + coords={"x": x, "y": y}, + ) + }) ds_region = 1 + ds.isel(x=slice(2, 4), y=slice(6, 8)) with pytest.raises(KeyError): @@ -5620,15 +5600,13 @@ def test_zarr_region_index_write(self, tmp_path): x = np.arange(0, 50, 10) y = np.arange(0, 20, 2) data = np.ones((5, 10)) - ds = xr.Dataset( - { - "test": xr.DataArray( - data, - dims=("x", "y"), - coords={"x": x, "y": y}, - ) - } - ) + ds = xr.Dataset({ + "test": xr.DataArray( + data, + dims=("x", "y"), + coords={"x": x, "y": y}, + ) + }) ds_region = 1 + ds.isel(x=slice(2, 4), y=slice(6, 8)) @@ -5653,28 +5631,24 @@ def test_zarr_region_append(self, tmp_path): x = np.arange(0, 50, 10) y = np.arange(0, 20, 2) data = np.ones((5, 10)) - ds = xr.Dataset( - { - "test": xr.DataArray( - data, - dims=("x", "y"), - coords={"x": x, "y": y}, - ) - } - ) + ds = xr.Dataset({ + "test": xr.DataArray( + data, + dims=("x", "y"), + coords={"x": x, "y": y}, + ) + }) ds.to_zarr(tmp_path / "test.zarr") x_new = np.arange(40, 70, 10) data_new = np.ones((3, 10)) - ds_new = xr.Dataset( - { - "test": xr.DataArray( - data_new, - dims=("x", "y"), - coords={"x": x_new, "y": y}, - ) - } - ) + ds_new = xr.Dataset({ + "test": xr.DataArray( + data_new, + dims=("x", "y"), + coords={"x": x_new, "y": y}, + ) + }) # Don't allow auto region detection in append mode due to complexities in # implementing the overlap logic and lack of safety with parallel writes @@ -5689,15 +5663,13 @@ def test_zarr_region(tmp_path): x = np.arange(0, 50, 10) y = np.arange(0, 20, 2) data = np.ones((5, 10)) - ds = xr.Dataset( - { - "test": xr.DataArray( - data, - dims=("x", "y"), - coords={"x": x, "y": y}, - ) - } - ) + ds = xr.Dataset({ + "test": xr.DataArray( + data, + dims=("x", "y"), + coords={"x": x, "y": y}, + ) + }) ds.to_zarr(tmp_path / "test.zarr") ds_transposed = ds.transpose("y", "x") diff --git a/xarray/tests/test_backends_api.py b/xarray/tests/test_backends_api.py index d4f8b7ed31d..e7663dc15b0 100644 --- a/xarray/tests/test_backends_api.py +++ b/xarray/tests/test_backends_api.py @@ -99,15 +99,13 @@ class TestPreferredChunks: def create_dataset(self, shape, pref_chunks): """Return a dataset with a variable with the given shape and preferred chunks.""" dims = tuple(f"dim_{idx}" for idx in range(len(shape))) - return xr.Dataset( - { - self.var_name: xr.Variable( - dims, - np.empty(shape, dtype=np.dtype("V1")), - encoding={"preferred_chunks": dict(zip(dims, pref_chunks))}, - ) - } - ) + return xr.Dataset({ + self.var_name: xr.Variable( + dims, + np.empty(shape, dtype=np.dtype("V1")), + encoding={"preferred_chunks": dict(zip(dims, pref_chunks))}, + ) + }) def check_dataset(self, initial, final, expected_chunks): assert_identical(initial, final) diff --git a/xarray/tests/test_cftimeindex.py b/xarray/tests/test_cftimeindex.py index f6eb15fa373..84ae5c8c49d 100644 --- a/xarray/tests/test_cftimeindex.py +++ b/xarray/tests/test_cftimeindex.py @@ -207,13 +207,15 @@ def test_assert_all_valid_date_type(date_type, index): import cftime if date_type is cftime.DatetimeNoLeap: - 
mixed_date_types = np.array( - [date_type(1, 1, 1), cftime.DatetimeAllLeap(1, 2, 1)] - ) + mixed_date_types = np.array([ + date_type(1, 1, 1), + cftime.DatetimeAllLeap(1, 2, 1), + ]) else: - mixed_date_types = np.array( - [date_type(1, 1, 1), cftime.DatetimeNoLeap(1, 2, 1)] - ) + mixed_date_types = np.array([ + date_type(1, 1, 1), + cftime.DatetimeNoLeap(1, 2, 1), + ]) with pytest.raises(TypeError): assert_all_valid_date_type(mixed_date_types) @@ -1126,12 +1128,10 @@ def test_parse_array_of_cftime_strings(): from cftime import DatetimeNoLeap strings = np.array([["2000-01-01", "2000-01-02"], ["2000-01-03", "2000-01-04"]]) - expected = np.array( - [ - [DatetimeNoLeap(2000, 1, 1), DatetimeNoLeap(2000, 1, 2)], - [DatetimeNoLeap(2000, 1, 3), DatetimeNoLeap(2000, 1, 4)], - ] - ) + expected = np.array([ + [DatetimeNoLeap(2000, 1, 1), DatetimeNoLeap(2000, 1, 2)], + [DatetimeNoLeap(2000, 1, 3), DatetimeNoLeap(2000, 1, 4)], + ]) result = _parse_array_of_cftime_strings(strings, DatetimeNoLeap) np.testing.assert_array_equal(result, expected) @@ -1228,51 +1228,43 @@ def test_rounding_methods_invalid_freq(method): @pytest.fixture def rounding_index(date_type): - return xr.CFTimeIndex( - [ - date_type(1, 1, 1, 1, 59, 59, 999512), - date_type(1, 1, 1, 3, 0, 1, 500001), - date_type(1, 1, 1, 7, 0, 6, 499999), - ] - ) + return xr.CFTimeIndex([ + date_type(1, 1, 1, 1, 59, 59, 999512), + date_type(1, 1, 1, 3, 0, 1, 500001), + date_type(1, 1, 1, 7, 0, 6, 499999), + ]) @requires_cftime def test_ceil(rounding_index, date_type): result = rounding_index.ceil("s") - expected = xr.CFTimeIndex( - [ - date_type(1, 1, 1, 2, 0, 0, 0), - date_type(1, 1, 1, 3, 0, 2, 0), - date_type(1, 1, 1, 7, 0, 7, 0), - ] - ) + expected = xr.CFTimeIndex([ + date_type(1, 1, 1, 2, 0, 0, 0), + date_type(1, 1, 1, 3, 0, 2, 0), + date_type(1, 1, 1, 7, 0, 7, 0), + ]) assert result.equals(expected) @requires_cftime def test_floor(rounding_index, date_type): result = rounding_index.floor("s") - expected = xr.CFTimeIndex( - [ - date_type(1, 1, 1, 1, 59, 59, 0), - date_type(1, 1, 1, 3, 0, 1, 0), - date_type(1, 1, 1, 7, 0, 6, 0), - ] - ) + expected = xr.CFTimeIndex([ + date_type(1, 1, 1, 1, 59, 59, 0), + date_type(1, 1, 1, 3, 0, 1, 0), + date_type(1, 1, 1, 7, 0, 6, 0), + ]) assert result.equals(expected) @requires_cftime def test_round(rounding_index, date_type): result = rounding_index.round("s") - expected = xr.CFTimeIndex( - [ - date_type(1, 1, 1, 2, 0, 0, 0), - date_type(1, 1, 1, 3, 0, 2, 0), - date_type(1, 1, 1, 7, 0, 6, 0), - ] - ) + expected = xr.CFTimeIndex([ + date_type(1, 1, 1, 2, 0, 0, 0), + date_type(1, 1, 1, 3, 0, 2, 0), + date_type(1, 1, 1, 7, 0, 6, 0), + ]) assert result.equals(expected) diff --git a/xarray/tests/test_coarsen.py b/xarray/tests/test_coarsen.py index 01d5393e289..6595f819f4c 100644 --- a/xarray/tests/test_coarsen.py +++ b/xarray/tests/test_coarsen.py @@ -293,9 +293,10 @@ def test_coarsen_construct(self, dask: bool) -> None: ) with raise_if_dask_computes(): - actual = ds.coarsen(time=12, x=5).construct( - {"time": ("year", "month"), "x": ("x", "x_reshaped")} - ) + actual = ds.coarsen(time=12, x=5).construct({ + "time": ("year", "month"), + "x": ("x", "x_reshaped"), + }) assert_identical(actual, expected) with raise_if_dask_computes(): @@ -313,9 +314,10 @@ def test_coarsen_construct(self, dask: bool) -> None: assert actual.attrs == {} with raise_if_dask_computes(): - actual = ds.vartx.coarsen(time=12, x=5).construct( - {"time": ("year", "month"), "x": ("x", "x_reshaped")} - ) + actual = ds.vartx.coarsen(time=12, 
x=5).construct({ + "time": ("year", "month"), + "x": ("x", "x_reshaped"), + }) assert_identical(actual, expected["vartx"]) with pytest.raises(ValueError): diff --git a/xarray/tests/test_combine.py b/xarray/tests/test_combine.py index ea1659e4539..be453f77106 100644 --- a/xarray/tests/test_combine.py +++ b/xarray/tests/test_combine.py @@ -161,9 +161,14 @@ def test_2d(self): (0, 2): ds4, (1, 2): ds5, } - actual, concat_dims = _infer_concat_order_from_coords( - [ds1, ds0, ds3, ds5, ds2, ds4] - ) + actual, concat_dims = _infer_concat_order_from_coords([ + ds1, + ds0, + ds3, + ds5, + ds2, + ds4, + ]) assert_combined_tile_ids_equal(expected, actual) assert concat_dims == ["x", "y"] @@ -416,9 +421,11 @@ def test_nested_concat_along_new_dim(self): # Same but with a DataArray as new dim, see GH #1988 and #2647 dim = DataArray([100, 150], name="baz", dims="baz") - expected = Dataset( - {"a": (("baz", "x"), [[10], [20]]), "x": [0], "baz": [100, 150]} - ) + expected = Dataset({ + "a": (("baz", "x"), [[10], [20]]), + "x": [0], + "baz": [100, 150], + }) actual = combine_nested(objs, concat_dim=dim) assert_identical(expected, actual) @@ -853,20 +860,16 @@ def test_combine_nested_combine_attrs_variables( self, combine_attrs, attrs1, attrs2, expected_attrs, expect_exception ): """check that combine_attrs is used on data variables and coords""" - data1 = Dataset( - { - "a": ("x", [1, 2], attrs1), - "b": ("x", [3, -1], attrs1), - "x": ("x", [0, 1], attrs1), - } - ) - data2 = Dataset( - { - "a": ("x", [2, 3], attrs2), - "b": ("x", [-2, 1], attrs2), - "x": ("x", [2, 3], attrs2), - } - ) + data1 = Dataset({ + "a": ("x", [1, 2], attrs1), + "b": ("x", [3, -1], attrs1), + "x": ("x", [0, 1], attrs1), + }) + data2 = Dataset({ + "a": ("x", [2, 3], attrs2), + "b": ("x", [-2, 1], attrs2), + "x": ("x", [2, 3], attrs2), + }) if expect_exception: with pytest.raises(MergeError, match="combine_attrs"): @@ -925,25 +928,27 @@ def test_combine_by_coords_combine_attrs_variables( self, combine_attrs, attrs1, attrs2, expected_attrs, expect_exception ): """check that combine_attrs is used on data variables and coords""" - data1 = Dataset( - {"x": ("a", [0], attrs1), "y": ("a", [0], attrs1), "a": ("a", [0], attrs1)} - ) - data2 = Dataset( - {"x": ("a", [1], attrs2), "y": ("a", [1], attrs2), "a": ("a", [1], attrs2)} - ) + data1 = Dataset({ + "x": ("a", [0], attrs1), + "y": ("a", [0], attrs1), + "a": ("a", [0], attrs1), + }) + data2 = Dataset({ + "x": ("a", [1], attrs2), + "y": ("a", [1], attrs2), + "a": ("a", [1], attrs2), + }) if expect_exception: with pytest.raises(MergeError, match="combine_attrs"): combine_by_coords([data1, data2], combine_attrs=combine_attrs) else: actual = combine_by_coords([data1, data2], combine_attrs=combine_attrs) - expected = Dataset( - { - "x": ("a", [0, 1], expected_attrs), - "y": ("a", [0, 1], expected_attrs), - "a": ("a", [0, 1], expected_attrs), - } - ) + expected = Dataset({ + "x": ("a", [0, 1], expected_attrs), + "y": ("a", [0, 1], expected_attrs), + "a": ("a", [0, 1], expected_attrs), + }) assert_identical(actual, expected) @@ -1089,12 +1094,10 @@ def test_combine_by_coords_all_named_dataarrays(self): named_da2 = DataArray(name="b", data=[3.0, 4.0], coords={"x": [2, 3]}, dims="x") actual = combine_by_coords([named_da1, named_da2]) - expected = Dataset( - { - "a": DataArray(data=[1.0, 2.0], coords={"x": [0, 1]}, dims="x"), - "b": DataArray(data=[3.0, 4.0], coords={"x": [2, 3]}, dims="x"), - } - ) + expected = Dataset({ + "a": DataArray(data=[1.0, 2.0], coords={"x": [0, 1]}, dims="x"), + "b": 
DataArray(data=[3.0, 4.0], coords={"x": [2, 3]}, dims="x"), + }) assert_identical(expected, actual) def test_combine_by_coords_all_dataarrays_with_the_same_name(self): diff --git a/xarray/tests/test_computation.py b/xarray/tests/test_computation.py index 820fcd48bd3..cedca6c649d 100644 --- a/xarray/tests/test_computation.py +++ b/xarray/tests/test_computation.py @@ -1164,13 +1164,11 @@ def add(a, b, join, dataset_join): # we used np.nan as the fill_value in add() above actual = add(ds0, ds2, "outer", "outer") - expected = xr.Dataset( - { - "a": ("x", [np.nan, np.nan, np.nan]), - "b": ("x", [np.nan, np.nan, np.nan]), - "x": [0, 1, 2], - } - ) + expected = xr.Dataset({ + "a": ("x", [np.nan, np.nan, np.nan]), + "b": ("x", [np.nan, np.nan, np.nan]), + "x": [0, 1, 2], + }) assert_identical(actual, expected) diff --git a/xarray/tests/test_concat.py b/xarray/tests/test_concat.py index 0cf4cc03a09..96cc86072b7 100644 --- a/xarray/tests/test_concat.py +++ b/xarray/tests/test_concat.py @@ -588,15 +588,13 @@ def test_concat_autoalign(self) -> None: ds1 = Dataset({"foo": DataArray([1, 2], coords=[("x", [1, 2])])}) ds2 = Dataset({"foo": DataArray([1, 2], coords=[("x", [1, 3])])}) actual = concat([ds1, ds2], "y") - expected = Dataset( - { - "foo": DataArray( - [[1, 2, np.nan], [1, np.nan, 2]], - dims=["y", "x"], - coords={"x": [1, 2, 3]}, - ) - } - ) + expected = Dataset({ + "foo": DataArray( + [[1, 2, np.nan], [1, np.nan, 2]], + dims=["y", "x"], + coords={"x": [1, 2, 3]}, + ) + }) assert_identical(expected, actual) def test_concat_errors(self): @@ -856,14 +854,12 @@ def test_concat_promote_shape(self) -> None: Dataset({"x": [pd.Interval(0, 1, closed="right")]}), ] actual = concat(objs, "x") - expected = Dataset( - { - "x": [ - pd.Interval(-1, 0, closed="right"), - pd.Interval(0, 1, closed="right"), - ] - } - ) + expected = Dataset({ + "x": [ + pd.Interval(-1, 0, closed="right"), + pd.Interval(0, 1, closed="right"), + ] + }) assert_identical(actual, expected) # regression GH6416 (coord dtype) and GH6434 @@ -960,20 +956,16 @@ def test_concat_fill_value(self, fill_value) -> None: def test_concat_str_dtype(self, dtype, dim) -> None: data = np.arange(4).reshape([2, 2]) - da1 = Dataset( - { - "data": (["x1", "x2"], data), - "x1": [0, 1], - "x2": np.array(["a", "b"], dtype=dtype), - } - ) - da2 = Dataset( - { - "data": (["x1", "x2"], data), - "x1": np.array([1, 2]), - "x2": np.array(["c", "d"], dtype=dtype), - } - ) + da1 = Dataset({ + "data": (["x1", "x2"], data), + "x1": [0, 1], + "x2": np.array(["a", "b"], dtype=dtype), + }) + da2 = Dataset({ + "data": (["x1", "x2"], data), + "x1": np.array([1, 2]), + "x2": np.array(["c", "d"], dtype=dtype), + }) actual = concat([da1, da2], dim=dim) assert np.issubdtype(actual.x2.dtype, dtype) diff --git a/xarray/tests/test_conventions.py b/xarray/tests/test_conventions.py index 91a8e368de5..2ab27fd3054 100644 --- a/xarray/tests/test_conventions.py +++ b/xarray/tests/test_conventions.py @@ -83,14 +83,12 @@ def test_decode_cf_variable_with_mismatched_coordinates() -> None: # tests for decoding mismatched coordinates attributes # see GH #1809 zeros1 = np.zeros((1, 5, 3)) - orig = Dataset( - { - "XLONG": (["x", "y"], zeros1.squeeze(0), {}), - "XLAT": (["x", "y"], zeros1.squeeze(0), {}), - "foo": (["time", "x", "y"], zeros1, {"coordinates": "XTIME XLONG XLAT"}), - "time": ("time", [0.0], {"units": "hours since 2017-01-01"}), - } - ) + orig = Dataset({ + "XLONG": (["x", "y"], zeros1.squeeze(0), {}), + "XLAT": (["x", "y"], zeros1.squeeze(0), {}), + "foo": (["time", "x", "y"], 
zeros1, {"coordinates": "XTIME XLONG XLAT"}), + "time": ("time", [0.0], {"units": "hours since 2017-01-01"}), + }) decoded = conventions.decode_cf(orig, decode_coords=True) assert decoded["foo"].encoding["coordinates"] == "XTIME XLONG XLAT" assert list(decoded.coords.keys()) == ["XLONG", "XLAT", "time"] @@ -129,20 +127,18 @@ def test_multidimensional_coordinates(self) -> None: zeros1 = np.zeros((1, 5, 3)) zeros2 = np.zeros((1, 6, 3)) zeros3 = np.zeros((1, 5, 4)) - orig = Dataset( - { - "lon1": (["x1", "y1"], zeros1.squeeze(0), {}), - "lon2": (["x2", "y1"], zeros2.squeeze(0), {}), - "lon3": (["x1", "y2"], zeros3.squeeze(0), {}), - "lat1": (["x1", "y1"], zeros1.squeeze(0), {}), - "lat2": (["x2", "y1"], zeros2.squeeze(0), {}), - "lat3": (["x1", "y2"], zeros3.squeeze(0), {}), - "foo1": (["time", "x1", "y1"], zeros1, {"coordinates": "lon1 lat1"}), - "foo2": (["time", "x2", "y1"], zeros2, {"coordinates": "lon2 lat2"}), - "foo3": (["time", "x1", "y2"], zeros3, {"coordinates": "lon3 lat3"}), - "time": ("time", [0.0], {"units": "hours since 2017-01-01"}), - } - ) + orig = Dataset({ + "lon1": (["x1", "y1"], zeros1.squeeze(0), {}), + "lon2": (["x2", "y1"], zeros2.squeeze(0), {}), + "lon3": (["x1", "y2"], zeros3.squeeze(0), {}), + "lat1": (["x1", "y1"], zeros1.squeeze(0), {}), + "lat2": (["x2", "y1"], zeros2.squeeze(0), {}), + "lat3": (["x1", "y2"], zeros3.squeeze(0), {}), + "foo1": (["time", "x1", "y1"], zeros1, {"coordinates": "lon1 lat1"}), + "foo2": (["time", "x2", "y1"], zeros2, {"coordinates": "lon2 lat2"}), + "foo3": (["time", "x1", "y2"], zeros3, {"coordinates": "lon3 lat3"}), + "time": ("time", [0.0], {"units": "hours since 2017-01-01"}), + }) orig = conventions.decode_cf(orig) # Encode the coordinates, as they would be in a netCDF output file. enc, attrs = conventions.encode_dataset_coordinates(orig) @@ -249,13 +245,11 @@ def test_string_object_warning(self) -> None: @requires_cftime class TestDecodeCF: def test_dataset(self) -> None: - original = Dataset( - { - "t": ("t", [0, 1, 2], {"units": "days since 2000-01-01"}), - "foo": ("t", [0, 0, 0], {"coordinates": "y", "units": "bar"}), - "y": ("t", [5, 10, -999], {"_FillValue": -999}), - } - ) + original = Dataset({ + "t": ("t", [0, 1, 2], {"units": "days since 2000-01-01"}), + "foo": ("t", [0, 0, 0], {"coordinates": "y", "units": "bar"}), + "y": ("t", [5, 10, -999], {"_FillValue": -999}), + }) expected = Dataset( {"foo": ("t", [0, 0, 0], {"units": "bar"})}, { @@ -277,9 +271,10 @@ def test_invalid_coordinates(self) -> None: def test_decode_coordinates(self) -> None: # regression test for GH610 - original = Dataset( - {"foo": ("t", [1, 2], {"coordinates": "x"}), "x": ("t", [4, 5])} - ) + original = Dataset({ + "foo": ("t", [1, 2], {"coordinates": "x"}), + "x": ("t", [4, 5]), + }) actual = conventions.decode_cf(original) assert actual.foo.encoding["coordinates"] == "x" @@ -297,29 +292,25 @@ def test_decode_cf_with_multiple_missing_values(self) -> None: assert_identical(expected, actual) def test_decode_cf_with_drop_variables(self) -> None: - original = Dataset( - { - "t": ("t", [0, 1, 2], {"units": "days since 2000-01-01"}), - "x": ("x", [9, 8, 7], {"units": "km"}), - "foo": ( - ("t", "x"), - [[0, 0, 0], [1, 1, 1], [2, 2, 2]], - {"units": "bar"}, - ), - "y": ("t", [5, 10, -999], {"_FillValue": -999}), - } - ) - expected = Dataset( - { - "t": pd.date_range("2000-01-01", periods=3), - "foo": ( - ("t", "x"), - [[0, 0, 0], [1, 1, 1], [2, 2, 2]], - {"units": "bar"}, - ), - "y": ("t", [5, 10, np.nan]), - } - ) + original = Dataset({ + "t": ("t", [0, 
1, 2], {"units": "days since 2000-01-01"}), + "x": ("x", [9, 8, 7], {"units": "km"}), + "foo": ( + ("t", "x"), + [[0, 0, 0], [1, 1, 1], [2, 2, 2]], + {"units": "bar"}, + ), + "y": ("t", [5, 10, -999], {"_FillValue": -999}), + }) + expected = Dataset({ + "t": pd.date_range("2000-01-01", periods=3), + "foo": ( + ("t", "x"), + [[0, 0, 0], [1, 1, 1], [2, 2, 2]], + {"units": "bar"}, + ), + "y": ("t", [5, 10, np.nan]), + }) actual = conventions.decode_cf(original, drop_variables=("x",)) actual2 = conventions.decode_cf(original, drop_variables="x") assert_identical(expected, actual) @@ -334,12 +325,12 @@ def test_invalid_time_units_raises_eagerly(self) -> None: @pytest.mark.parametrize("decode_times", [True, False]) def test_invalid_timedelta_units_do_not_decode(self, decode_times) -> None: # regression test for #8269 - ds = Dataset( - {"time": ("time", [0, 1, 20], {"units": "days invalid", "_FillValue": 20})} - ) - expected = Dataset( - {"time": ("time", [0.0, 1.0, np.nan], {"units": "days invalid"})} - ) + ds = Dataset({ + "time": ("time", [0, 1, 20], {"units": "days invalid", "_FillValue": 20}) + }) + expected = Dataset({ + "time": ("time", [0.0, 1.0, np.nan], {"units": "days invalid"}) + }) assert_identical(expected, decode_cf(ds, decode_times=decode_times)) @requires_cftime @@ -375,15 +366,13 @@ def test_decode_cf_datetime_transition_to_invalid(self) -> None: def test_decode_cf_with_dask(self) -> None: import dask.array as da - original = Dataset( - { - "t": ("t", [0, 1, 2], {"units": "days since 2000-01-01"}), - "foo": ("t", [0, 0, 0], {"coordinates": "y", "units": "bar"}), - "bar": ("string2", [b"a", b"b"]), - "baz": (("x"), [b"abc"], {"_Encoding": "utf-8"}), - "y": ("t", [5, 10, -999], {"_FillValue": -999}), - } - ).chunk() + original = Dataset({ + "t": ("t", [0, 1, 2], {"units": "days since 2000-01-01"}), + "foo": ("t", [0, 0, 0], {"coordinates": "y", "units": "bar"}), + "bar": ("string2", [b"a", b"b"]), + "baz": (("x"), [b"abc"], {"_Encoding": "utf-8"}), + "y": ("t", [5, 10, -999], {"_FillValue": -999}), + }).chunk() decoded = conventions.decode_cf(original) assert all( isinstance(var.data, da.Array) @@ -394,45 +383,41 @@ def test_decode_cf_with_dask(self) -> None: @requires_dask def test_decode_dask_times(self) -> None: - original = Dataset.from_dict( - { - "coords": {}, - "dims": {"time": 5}, - "data_vars": { - "average_T1": { - "dims": ("time",), - "attrs": {"units": "days since 1958-01-01 00:00:00"}, - "data": [87659.0, 88024.0, 88389.0, 88754.0, 89119.0], - } - }, - } - ) + original = Dataset.from_dict({ + "coords": {}, + "dims": {"time": 5}, + "data_vars": { + "average_T1": { + "dims": ("time",), + "attrs": {"units": "days since 1958-01-01 00:00:00"}, + "data": [87659.0, 88024.0, 88389.0, 88754.0, 89119.0], + } + }, + }) assert_identical( conventions.decode_cf(original.chunk()), conventions.decode_cf(original).chunk(), ) def test_decode_cf_time_kwargs(self) -> None: - ds = Dataset.from_dict( - { - "coords": { - "timedelta": { - "data": np.array([1, 2, 3], dtype="int64"), - "dims": "timedelta", - "attrs": {"units": "days"}, - }, - "time": { - "data": np.array([1, 2, 3], dtype="int64"), - "dims": "time", - "attrs": {"units": "days since 2000-01-01"}, - }, + ds = Dataset.from_dict({ + "coords": { + "timedelta": { + "data": np.array([1, 2, 3], dtype="int64"), + "dims": "timedelta", + "attrs": {"units": "days"}, }, - "dims": {"time": 3, "timedelta": 3}, - "data_vars": { - "a": {"dims": ("time", "timedelta"), "data": np.ones((3, 3))}, + "time": { + "data": np.array([1, 2, 3], 
dtype="int64"), + "dims": "time", + "attrs": {"units": "days since 2000-01-01"}, }, - } - ) + }, + "dims": {"time": 3, "timedelta": 3}, + "data_vars": { + "a": {"dims": ("time", "timedelta"), "data": np.ones((3, 3))}, + }, + }) dsc = conventions.decode_cf(ds) assert dsc.timedelta.dtype == np.dtype("m8[ns]") diff --git a/xarray/tests/test_dask.py b/xarray/tests/test_dask.py index 07bf773cc88..49b67936e59 100644 --- a/xarray/tests/test_dask.py +++ b/xarray/tests/test_dask.py @@ -789,9 +789,11 @@ def test_to_dask_dataframe(self): y = np.arange(10, dtype="uint8") t = list("abcdefghij") - ds = Dataset( - {"a": ("t", da.from_array(x, chunks=4)), "b": ("t", y), "t": ("t", t)} - ) + ds = Dataset({ + "a": ("t", da.from_array(x, chunks=4)), + "b": ("t", y), + "t": ("t", t), + }) expected_pd = pd.DataFrame({"a": x, "b": y}, index=pd.Index(t, name="t")) @@ -854,12 +856,10 @@ def test_to_dask_dataframe_coordinates(self): x = np.random.randn(10) t = np.arange(10) * 2 - ds = Dataset( - { - "a": ("t", da.from_array(x, chunks=4)), - "t": ("t", da.from_array(t, chunks=4)), - } - ) + ds = Dataset({ + "a": ("t", da.from_array(x, chunks=4)), + "t": ("t", da.from_array(t, chunks=4)), + }) expected_pd = pd.DataFrame({"a": x}, index=pd.Index(t, name="t")) expected = dd.from_pandas(expected_pd, chunksize=4) @@ -1454,12 +1454,10 @@ def func(obj): def test_map_blocks_hlg_layers(): # regression test for #3599 - ds = xr.Dataset( - { - "x": (("a",), dask.array.ones(10, chunks=(5,))), - "z": (("b",), dask.array.ones(10, chunks=(5,))), - } - ) + ds = xr.Dataset({ + "x": (("a",), dask.array.ones(10, chunks=(5,))), + "z": (("b",), dask.array.ones(10, chunks=(5,))), + }) mapped = ds.map_blocks(lambda x: x) xr.testing.assert_equal(mapped, ds) diff --git a/xarray/tests/test_dataarray.py b/xarray/tests/test_dataarray.py index f6964f3ad8c..1c0b8a5f8fb 100644 --- a/xarray/tests/test_dataarray.py +++ b/xarray/tests/test_dataarray.py @@ -319,22 +319,22 @@ def test_constructor(self) -> None: assert_identical(expected, actual) actual = DataArray(data, [["a", "b"], [-1, -2, -3]]) - expected = Dataset( - { - None: (["dim_0", "dim_1"], data), - "dim_0": ("dim_0", ["a", "b"]), - "dim_1": ("dim_1", [-1, -2, -3]), - } - )[None] + expected = Dataset({ + None: (["dim_0", "dim_1"], data), + "dim_0": ("dim_0", ["a", "b"]), + "dim_1": ("dim_1", [-1, -2, -3]), + })[None] assert_identical(expected, actual) # pd.Index coords, w/o dims actual = DataArray( data, [pd.Index(["a", "b"], name="x"), pd.Index([-1, -2, -3], name="y")] ) - expected = Dataset( - {None: (["x", "y"], data), "x": ("x", ["a", "b"]), "y": ("y", [-1, -2, -3])} - )[None] + expected = Dataset({ + None: (["x", "y"], data), + "x": ("x", ["a", "b"]), + "y": ("y", [-1, -2, -3]), + })[None] assert_identical(expected, actual) # list coords, w dims @@ -510,8 +510,7 @@ def test_constructor_multiindex(self) -> None: assert_identical(da.coords, coords) def test_constructor_custom_index(self) -> None: - class CustomIndex(Index): - ... + class CustomIndex(Index): ... 
coords = Coordinates( coords={"x": ("x", [1, 2, 3])}, indexes={"x": CustomIndex()} @@ -1525,14 +1524,12 @@ def test_reset_coords(self) -> None: ) actual1 = data.reset_coords() - expected1 = Dataset( - { - "foo": (["x", "y"], np.zeros((3, 4))), - "bar": ("x", ["a", "b", "c"]), - "baz": ("y", range(4)), - "y": range(4), - } - ) + expected1 = Dataset({ + "foo": (["x", "y"], np.zeros((3, 4))), + "bar": ("x", ["a", "b", "c"]), + "baz": ("y", range(4)), + "y": range(4), + }) assert_identical(actual1, expected1) actual2 = data.reset_coords(["bar", "baz"]) @@ -2444,14 +2441,12 @@ def test_dataset_math(self) -> None: expected2 = DataArray(10 * np.ones(5), obs.coords) assert_identical(actual2, expected2) - sim = Dataset( - { - "tmin": ("x", 1 + np.arange(5)), - "tmax": ("x", 11 + np.arange(5)), - # does *not* include 'loc' as a coordinate - "x": ("x", 0.5 * np.arange(5)), - } - ) + sim = Dataset({ + "tmin": ("x", 1 + np.arange(5)), + "tmax": ("x", 11 + np.arange(5)), + # does *not* include 'loc' as a coordinate + "x": ("x", 0.5 * np.arange(5)), + }) actual3 = sim["tmin"] - obs["tmin"] expected3 = DataArray(np.ones(5), obs.coords, name="tmin") @@ -5196,39 +5191,33 @@ def test_argmax_dim( ["x", "minindex", "maxindex", "nanindex"], [ pytest.param( - np.array( - [ - [0, 1, 2, 0, -2, -4, 2], - [1, 1, 1, 1, 1, 1, 1], - [0, 0, -10, 5, 20, 0, 0], - ] - ), + np.array([ + [0, 1, 2, 0, -2, -4, 2], + [1, 1, 1, 1, 1, 1, 1], + [0, 0, -10, 5, 20, 0, 0], + ]), [5, 0, 2], [2, 0, 4], [None, None, None], id="int", ), pytest.param( - np.array( - [ - [2.0, 1.0, 2.0, 0.0, -2.0, -4.0, 2.0], - [-4.0, np.nan, 2.0, np.nan, -2.0, -4.0, 2.0], - [np.nan] * 7, - ] - ), + np.array([ + [2.0, 1.0, 2.0, 0.0, -2.0, -4.0, 2.0], + [-4.0, np.nan, 2.0, np.nan, -2.0, -4.0, 2.0], + [np.nan] * 7, + ]), [5, 0, np.nan], [0, 2, np.nan], [None, 1, 0], id="nan", ), pytest.param( - np.array( - [ - [2.0, 1.0, 2.0, 0.0, -2.0, -4.0, 2.0], - [-4.0, np.nan, 2.0, np.nan, -2.0, -4.0, 2.0], - [np.nan] * 7, - ] - ).astype("object"), + np.array([ + [2.0, 1.0, 2.0, 0.0, -2.0, -4.0, 2.0], + [-4.0, np.nan, 2.0, np.nan, -2.0, -4.0, 2.0], + [np.nan] * 7, + ]).astype("object"), [5, 0, np.nan], [0, 2, np.nan], [None, 1, 0], @@ -5876,13 +5865,11 @@ def test_argmax_dim( "nanindices_xz, nanindices_yz, nanindices_xyz", [ pytest.param( - np.array( - [ - [[0, 1, 2, 0], [-2, -4, 2, 0]], - [[1, 1, 1, 1], [1, 1, 1, 1]], - [[0, 0, -10, 5], [20, 0, 0, 0]], - ] - ), + np.array([ + [[0, 1, 2, 0], [-2, -4, 2, 0]], + [[1, 1, 1, 1], [1, 1, 1, 1]], + [[0, 0, -10, 5], [20, 0, 0, 0]], + ]), {"x": np.array([[0, 2, 2, 0], [0, 0, 2, 0]])}, {"y": np.array([[1, 1, 0, 0], [0, 0, 0, 0], [0, 0, 0, 1]])}, {"z": np.array([[0, 1], [0, 0], [2, 1]])}, @@ -5899,13 +5886,11 @@ def test_argmax_dim( {"x": np.array(2), "y": np.array(1), "z": np.array(0)}, {"x": np.array([[None, None, None, None], [None, None, None, None]])}, { - "y": np.array( - [ - [None, None, None, None], - [None, None, None, None], - [None, None, None, None], - ] - ) + "y": np.array([ + [None, None, None, None], + [None, None, None, None], + [None, None, None, None], + ]) }, {"z": np.array([[None, None], [None, None], [None, None]])}, { @@ -5918,18 +5903,18 @@ def test_argmax_dim( id="int", ), pytest.param( - np.array( - [ - [[2.0, 1.0, 2.0, 0.0], [-2.0, -4.0, 2.0, 0.0]], - [[-4.0, np.nan, 2.0, np.nan], [-2.0, -4.0, 2.0, 0.0]], - [[np.nan] * 4, [np.nan] * 4], - ] - ), + np.array([ + [[2.0, 1.0, 2.0, 0.0], [-2.0, -4.0, 2.0, 0.0]], + [[-4.0, np.nan, 2.0, np.nan], [-2.0, -4.0, 2.0, 0.0]], + [[np.nan] * 4, [np.nan] * 4], + ]), {"x": 
np.array([[1, 0, 0, 0], [0, 0, 0, 0]])}, { - "y": np.array( - [[1, 1, 0, 0], [0, 1, 0, 1], [np.nan, np.nan, np.nan, np.nan]] - ) + "y": np.array([ + [1, 1, 0, 0], + [0, 1, 0, 1], + [np.nan, np.nan, np.nan, np.nan], + ]) }, {"z": np.array([[3, 1], [0, 1], [np.nan, np.nan]])}, {"x": np.array([1, 0, 0, 0]), "y": np.array([0, 1, 0, 0])}, @@ -5938,9 +5923,11 @@ def test_argmax_dim( {"x": np.array(0), "y": np.array(1), "z": np.array(1)}, {"x": np.array([[0, 0, 0, 0], [0, 0, 0, 0]])}, { - "y": np.array( - [[0, 0, 0, 0], [1, 1, 0, 1], [np.nan, np.nan, np.nan, np.nan]] - ) + "y": np.array([ + [0, 0, 0, 0], + [1, 1, 0, 1], + [np.nan, np.nan, np.nan, np.nan], + ]) }, {"z": np.array([[0, 2], [2, 2], [np.nan, np.nan]])}, {"x": np.array([0, 0, 0, 0]), "y": np.array([0, 0, 0, 0])}, @@ -5949,9 +5936,11 @@ def test_argmax_dim( {"x": np.array(0), "y": np.array(0), "z": np.array(0)}, {"x": np.array([[2, 1, 2, 1], [2, 2, 2, 2]])}, { - "y": np.array( - [[None, None, None, None], [None, 0, None, 0], [0, 0, 0, 0]] - ) + "y": np.array([ + [None, None, None, None], + [None, 0, None, 0], + [0, 0, 0, 0], + ]) }, {"z": np.array([[None, None], [1, None], [0, 0]])}, {"x": np.array([2, 1, 2, 1]), "y": np.array([0, 0, 0, 0])}, @@ -5961,18 +5950,18 @@ def test_argmax_dim( id="nan", ), pytest.param( - np.array( - [ - [[2.0, 1.0, 2.0, 0.0], [-2.0, -4.0, 2.0, 0.0]], - [[-4.0, np.nan, 2.0, np.nan], [-2.0, -4.0, 2.0, 0.0]], - [[np.nan] * 4, [np.nan] * 4], - ] - ).astype("object"), + np.array([ + [[2.0, 1.0, 2.0, 0.0], [-2.0, -4.0, 2.0, 0.0]], + [[-4.0, np.nan, 2.0, np.nan], [-2.0, -4.0, 2.0, 0.0]], + [[np.nan] * 4, [np.nan] * 4], + ]).astype("object"), {"x": np.array([[1, 0, 0, 0], [0, 0, 0, 0]])}, { - "y": np.array( - [[1, 1, 0, 0], [0, 1, 0, 1], [np.nan, np.nan, np.nan, np.nan]] - ) + "y": np.array([ + [1, 1, 0, 0], + [0, 1, 0, 1], + [np.nan, np.nan, np.nan, np.nan], + ]) }, {"z": np.array([[3, 1], [0, 1], [np.nan, np.nan]])}, {"x": np.array([1, 0, 0, 0]), "y": np.array([0, 1, 0, 0])}, @@ -5981,9 +5970,11 @@ def test_argmax_dim( {"x": np.array(0), "y": np.array(1), "z": np.array(1)}, {"x": np.array([[0, 0, 0, 0], [0, 0, 0, 0]])}, { - "y": np.array( - [[0, 0, 0, 0], [1, 1, 0, 1], [np.nan, np.nan, np.nan, np.nan]] - ) + "y": np.array([ + [0, 0, 0, 0], + [1, 1, 0, 1], + [np.nan, np.nan, np.nan, np.nan], + ]) }, {"z": np.array([[0, 2], [2, 2], [np.nan, np.nan]])}, {"x": np.array([0, 0, 0, 0]), "y": np.array([0, 0, 0, 0])}, @@ -5992,9 +5983,11 @@ def test_argmax_dim( {"x": np.array(0), "y": np.array(0), "z": np.array(0)}, {"x": np.array([[2, 1, 2, 1], [2, 2, 2, 2]])}, { - "y": np.array( - [[None, None, None, None], [None, 0, None, 0], [0, 0, 0, 0]] - ) + "y": np.array([ + [None, None, None, None], + [None, 0, None, 0], + [0, 0, 0, 0], + ]) }, {"z": np.array([[None, None], [1, None], [0, 0]])}, {"x": np.array([2, 1, 2, 1]), "y": np.array([0, 0, 0, 0])}, diff --git a/xarray/tests/test_dataset.py b/xarray/tests/test_dataset.py index 4128dbe1a99..58f8869e308 100644 --- a/xarray/tests/test_dataset.py +++ b/xarray/tests/test_dataset.py @@ -616,9 +616,10 @@ def test_constructor_compat(self) -> None: {"c": (("x", "y"), np.zeros((2, 3))), "x": [0, 1]}, ) - actual = Dataset( - {"a": original["a"][:, 0], "b": original["a"][0].drop_vars("x")} - ) + actual = Dataset({ + "a": original["a"][:, 0], + "b": original["a"][0].drop_vars("x"), + }) assert_identical(expected, actual) data = {"x": DataArray(0, coords={"y": 3}), "y": ("z", [1, 1, 1])} @@ -675,8 +676,7 @@ def test_constructor_multiindex(self) -> None: Dataset(coords={"x": midx}) def 
test_constructor_custom_index(self) -> None: - class CustomIndex(Index): - ... + class CustomIndex(Index): ... coords = Coordinates( coords={"x": ("x", [1, 2, 3])}, indexes={"x": CustomIndex()} @@ -1750,9 +1750,10 @@ def test_categorical_multiindex(self) -> None: cat = pd.CategoricalDtype(categories=["foo", "baz", "bar"]) i2 = pd.Series(["baz", "bar"], dtype=cat) - df = pd.DataFrame({"i1": i1, "i2": i2, "values": [1, 2]}).set_index( - ["i1", "i2"] - ) + df = pd.DataFrame({"i1": i1, "i2": i2, "values": [1, 2]}).set_index([ + "i1", + "i2", + ]) actual = df.to_xarray() assert actual["values"].shape == (1, 2) @@ -2243,13 +2244,11 @@ def test_reindex_fill_value(self, fill_value) -> None: fill_value_z = fill_value["z"] else: fill_value_x = fill_value_z = fill_value - expected = Dataset( - { - "x": ("y", [10, 20, fill_value_x]), - "z": ("y", [-20, -10, fill_value_z]), - "y": y, - } - ) + expected = Dataset({ + "x": ("y", [10, 20, fill_value_x]), + "z": ("y", [-20, -10, fill_value_z]), + "y": y, + }) assert_identical(expected, actual) @pytest.mark.parametrize("fill_value", [dtypes.NA, 2, 2.0, {"x": 2, "z": 1}]) @@ -2267,13 +2266,11 @@ def test_reindex_like_fill_value(self, fill_value) -> None: fill_value_z = fill_value["z"] else: fill_value_x = fill_value_z = fill_value - expected = Dataset( - { - "x": ("y", [10, 20, fill_value_x]), - "z": ("y", [-20, -10, fill_value_z]), - "y": y, - } - ) + expected = Dataset({ + "x": ("y", [10, 20, fill_value_x]), + "z": ("y", [-20, -10, fill_value_z]), + "y": y, + }) assert_identical(expected, actual) @pytest.mark.parametrize("dtype", [str, bytes]) @@ -2301,20 +2298,16 @@ def test_align_fill_value(self, fill_value) -> None: else: fill_value_foo = fill_value_bar = fill_value - expected_x2 = Dataset( - { - "foo": DataArray( - [1, 2, fill_value_foo], dims=["x"], coords={"x": [1, 2, 3]} - ) - } - ) - expected_y2 = Dataset( - { - "bar": DataArray( - [1, fill_value_bar, 2], dims=["x"], coords={"x": [1, 2, 3]} - ) - } - ) + expected_x2 = Dataset({ + "foo": DataArray( + [1, 2, fill_value_foo], dims=["x"], coords={"x": [1, 2, 3]} + ) + }) + expected_y2 = Dataset({ + "bar": DataArray( + [1, fill_value_bar, 2], dims=["x"], coords={"x": [1, 2, 3]} + ) + }) assert_identical(expected_x2, x2) assert_identical(expected_y2, y2) @@ -2399,40 +2392,32 @@ def test_align_override(self) -> None: xr.align(left.isel(x=0).expand_dims("x"), right, join="override") def test_align_exclude(self) -> None: - x = Dataset( - { - "foo": DataArray( - [[1, 2], [3, 4]], dims=["x", "y"], coords={"x": [1, 2], "y": [3, 4]} - ) - } - ) - y = Dataset( - { - "bar": DataArray( - [[1, 2], [3, 4]], dims=["x", "y"], coords={"x": [1, 3], "y": [5, 6]} - ) - } - ) + x = Dataset({ + "foo": DataArray( + [[1, 2], [3, 4]], dims=["x", "y"], coords={"x": [1, 2], "y": [3, 4]} + ) + }) + y = Dataset({ + "bar": DataArray( + [[1, 2], [3, 4]], dims=["x", "y"], coords={"x": [1, 3], "y": [5, 6]} + ) + }) x2, y2 = align(x, y, exclude=["y"], join="outer") - expected_x2 = Dataset( - { - "foo": DataArray( - [[1, 2], [3, 4], [np.nan, np.nan]], - dims=["x", "y"], - coords={"x": [1, 2, 3], "y": [3, 4]}, - ) - } - ) - expected_y2 = Dataset( - { - "bar": DataArray( - [[1, 2], [np.nan, np.nan], [3, 4]], - dims=["x", "y"], - coords={"x": [1, 2, 3], "y": [5, 6]}, - ) - } - ) + expected_x2 = Dataset({ + "foo": DataArray( + [[1, 2], [3, 4], [np.nan, np.nan]], + dims=["x", "y"], + coords={"x": [1, 2, 3], "y": [3, 4]}, + ) + }) + expected_y2 = Dataset({ + "bar": DataArray( + [[1, 2], [np.nan, np.nan], [3, 4]], + dims=["x", "y"], + 
coords={"x": [1, 2, 3], "y": [5, 6]}, + ) + }) assert_identical(expected_x2, x2) assert_identical(expected_y2, y2) @@ -2440,9 +2425,9 @@ def test_align_nocopy(self) -> None: x = Dataset({"foo": DataArray([1, 2, 3], coords=[("x", [1, 2, 3])])}) y = Dataset({"foo": DataArray([1, 2], coords=[("x", [1, 2])])}) expected_x2 = x - expected_y2 = Dataset( - {"foo": DataArray([1, 2, np.nan], coords=[("x", [1, 2, 3])])} - ) + expected_y2 = Dataset({ + "foo": DataArray([1, 2, np.nan], coords=[("x", [1, 2, 3])]) + }) x2, y2 = align(x, y, copy=False, join="outer") assert_identical(expected_x2, x2) @@ -2457,9 +2442,9 @@ def test_align_nocopy(self) -> None: def test_align_indexes(self) -> None: x = Dataset({"foo": DataArray([1, 2, 3], dims="x", coords=[("x", [1, 2, 3])])}) (x2,) = align(x, indexes={"x": [2, 3, 1]}) - expected_x2 = Dataset( - {"foo": DataArray([2, 3, 1], dims="x", coords={"x": [2, 3, 1]})} - ) + expected_x2 = Dataset({ + "foo": DataArray([2, 3, 1], dims="x", coords={"x": [2, 3, 1]}) + }) assert_identical(expected_x2, x2) @@ -2549,77 +2534,61 @@ def test_broadcast_nocopy(self) -> None: assert source_ndarray(actual_x["foo"].data) is source_ndarray(x["foo"].data) def test_broadcast_exclude(self) -> None: - x = Dataset( - { - "foo": DataArray( - [[1, 2], [3, 4]], dims=["x", "y"], coords={"x": [1, 2], "y": [3, 4]} - ), - "bar": DataArray(5), - } - ) - y = Dataset( - { - "foo": DataArray( - [[1, 2]], dims=["z", "y"], coords={"z": [1], "y": [5, 6]} - ) - } - ) + x = Dataset({ + "foo": DataArray( + [[1, 2], [3, 4]], dims=["x", "y"], coords={"x": [1, 2], "y": [3, 4]} + ), + "bar": DataArray(5), + }) + y = Dataset({ + "foo": DataArray([[1, 2]], dims=["z", "y"], coords={"z": [1], "y": [5, 6]}) + }) x2, y2 = broadcast(x, y, exclude=["y"]) - expected_x2 = Dataset( - { - "foo": DataArray( - [[[1, 2]], [[3, 4]]], - dims=["x", "z", "y"], - coords={"z": [1], "x": [1, 2], "y": [3, 4]}, - ), - "bar": DataArray( - [[5], [5]], dims=["x", "z"], coords={"x": [1, 2], "z": [1]} - ), - } - ) - expected_y2 = Dataset( - { - "foo": DataArray( - [[[1, 2]], [[1, 2]]], - dims=["x", "z", "y"], - coords={"z": [1], "x": [1, 2], "y": [5, 6]}, - ) - } - ) + expected_x2 = Dataset({ + "foo": DataArray( + [[[1, 2]], [[3, 4]]], + dims=["x", "z", "y"], + coords={"z": [1], "x": [1, 2], "y": [3, 4]}, + ), + "bar": DataArray( + [[5], [5]], dims=["x", "z"], coords={"x": [1, 2], "z": [1]} + ), + }) + expected_y2 = Dataset({ + "foo": DataArray( + [[[1, 2]], [[1, 2]]], + dims=["x", "z", "y"], + coords={"z": [1], "x": [1, 2], "y": [5, 6]}, + ) + }) assert_identical(expected_x2, x2) assert_identical(expected_y2, y2) def test_broadcast_misaligned(self) -> None: x = Dataset({"foo": DataArray([1, 2, 3], coords=[("x", [-1, -2, -3])])}) - y = Dataset( - { - "bar": DataArray( - [[1, 2], [3, 4]], - dims=["y", "x"], - coords={"y": [1, 2], "x": [10, -3]}, - ) - } - ) + y = Dataset({ + "bar": DataArray( + [[1, 2], [3, 4]], + dims=["y", "x"], + coords={"y": [1, 2], "x": [10, -3]}, + ) + }) x2, y2 = broadcast(x, y) - expected_x2 = Dataset( - { - "foo": DataArray( - [[3, 3], [2, 2], [1, 1], [np.nan, np.nan]], - dims=["x", "y"], - coords={"y": [1, 2], "x": [-3, -2, -1, 10]}, - ) - } - ) - expected_y2 = Dataset( - { - "bar": DataArray( - [[2, 4], [np.nan, np.nan], [np.nan, np.nan], [1, 3]], - dims=["x", "y"], - coords={"y": [1, 2], "x": [-3, -2, -1, 10]}, - ) - } - ) + expected_x2 = Dataset({ + "foo": DataArray( + [[3, 3], [2, 2], [1, 1], [np.nan, np.nan]], + dims=["x", "y"], + coords={"y": [1, 2], "x": [-3, -2, -1, 10]}, + ) + }) + expected_y2 = 
Dataset({ + "bar": DataArray( + [[2, 4], [np.nan, np.nan], [np.nan, np.nan], [1, 3]], + dims=["x", "y"], + coords={"y": [1, 2], "x": [-3, -2, -1, 10]}, + ) + }) assert_identical(expected_x2, x2) assert_identical(expected_y2, y2) @@ -2748,9 +2717,11 @@ def test_drop_index_labels(self) -> None: data.drop_sel(x=0) def test_drop_labels_by_keyword(self) -> None: - data = Dataset( - {"A": (["x", "y"], np.random.randn(2, 6)), "x": ["a", "b"], "y": range(6)} - ) + data = Dataset({ + "A": (["x", "y"], np.random.randn(2, 6)), + "x": ["a", "b"], + "y": range(6), + }) # Basic functionality. assert len(data.coords["x"]) == 2 @@ -2784,9 +2755,11 @@ def test_drop_labels_by_keyword(self) -> None: data.drop(dim="x", x="a") def test_drop_labels_by_position(self) -> None: - data = Dataset( - {"A": (["x", "y"], np.random.randn(2, 6)), "x": ["a", "b"], "y": range(6)} - ) + data = Dataset({ + "A": (["x", "y"], np.random.randn(2, 6)), + "x": ["a", "b"], + "y": range(6), + }) # Basic functionality. assert len(data.coords["x"]) == 2 @@ -2851,14 +2824,12 @@ def test_drop_indexes(self) -> None: ds.drop_indexes("a") def test_drop_dims(self) -> None: - data = xr.Dataset( - { - "A": (["x", "y"], np.random.randn(2, 3)), - "B": ("x", np.random.randn(2)), - "x": ["a", "b"], - "z": np.pi, - } - ) + data = xr.Dataset({ + "A": (["x", "y"], np.random.randn(2, 3)), + "B": ("x", np.random.randn(2)), + "x": ["a", "b"], + "z": np.pi, + }) actual = data.drop_dims("x") expected = data.drop_vars(["A", "B", "x"]) @@ -3063,9 +3034,11 @@ def test_rename_same_name(self) -> None: def test_rename_dims(self) -> None: original = Dataset({"x": ("x", [0, 1, 2]), "y": ("x", [10, 11, 12]), "z": 42}) - expected = Dataset( - {"x": ("x_new", [0, 1, 2]), "y": ("x_new", [10, 11, 12]), "z": 42} - ) + expected = Dataset({ + "x": ("x_new", [0, 1, 2]), + "y": ("x_new", [10, 11, 12]), + "z": 42, + }) # TODO: (benbovy - explicit indexes) update when set_index supports # setting index for non-dimension variables expected = expected.set_coords("x") @@ -3084,9 +3057,11 @@ def test_rename_dims(self) -> None: def test_rename_vars(self) -> None: original = Dataset({"x": ("x", [0, 1, 2]), "y": ("x", [10, 11, 12]), "z": 42}) - expected = Dataset( - {"x_new": ("x", [0, 1, 2]), "y": ("x", [10, 11, 12]), "z": 42} - ) + expected = Dataset({ + "x_new": ("x", [0, 1, 2]), + "y": ("x", [10, 11, 12]), + "z": 42, + }) # TODO: (benbovy - explicit indexes) update when set_index supports # setting index for non-dimension variables expected = expected.set_coords("x_new") @@ -3603,8 +3578,7 @@ def test_set_xindex(self) -> None: expected_mindex = ds.set_index(x=["foo", "bar"]) assert_identical(actual_mindex, expected_mindex) - class NotAnIndex: - ... + class NotAnIndex: ... 
with pytest.raises(TypeError, match=".*not a subclass of xarray.Index"): ds.set_xindex("foo", NotAnIndex) # type: ignore @@ -3744,9 +3718,11 @@ def test_unstack(self) -> None: index = pd.MultiIndex.from_product([[0, 1], ["a", "b"]], names=["x", "y"]) coords = Coordinates.from_pandas_multiindex(index, "z") ds = Dataset(data_vars={"b": ("z", [0, 1, 2, 3])}, coords=coords) - expected = Dataset( - {"b": (("x", "y"), [[0, 1], [2, 3]]), "x": [0, 1], "y": ["a", "b"]} - ) + expected = Dataset({ + "b": (("x", "y"), [[0, 1], [2, 3]]), + "x": [0, 1], + "y": ["a", "b"], + }) # check attrs propagated ds["x"].attrs["foo"] = "bar" @@ -3843,14 +3819,12 @@ def test_unstack_sparse(self) -> None: assert_equal(expected3, actual3) def test_stack_unstack_fast(self) -> None: - ds = Dataset( - { - "a": ("x", [0, 1]), - "b": (("x", "y"), [[0, 1], [2, 3]]), - "x": [0, 1], - "y": ["a", "b"], - } - ) + ds = Dataset({ + "a": ("x", [0, 1]), + "b": (("x", "y"), [[0, 1], [2, 3]]), + "x": [0, 1], + "y": ["a", "b"], + }) actual = ds.stack(z=["x", "y"]).unstack("z") assert actual.broadcast_equals(ds) @@ -4571,9 +4545,9 @@ def test_squeeze(self) -> None: def get_args(v): return [set(args[0]) & set(v.dims)] if args else [] - expected = Dataset( - {k: v.squeeze(*get_args(v)) for k, v in data.variables.items()} - ) + expected = Dataset({ + k: v.squeeze(*get_args(v)) for k, v in data.variables.items() + }) expected = expected.set_coords(data.coords) assert_identical(expected, data.squeeze(*args)) # invalid squeeze @@ -4746,9 +4720,11 @@ def test_from_dataframe_categorical_string_categories(self) -> None: def test_from_dataframe_sparse(self) -> None: import sparse - df_base = pd.DataFrame( - {"x": range(10), "y": list("abcdefghij"), "z": np.arange(0, 100, 10)} - ) + df_base = pd.DataFrame({ + "x": range(10), + "y": list("abcdefghij"), + "z": np.arange(0, 100, 10), + }) ds_sparse = Dataset.from_dataframe(df_base.set_index("x"), sparse=True) ds_dense = Dataset.from_dataframe(df_base.set_index("x"), sparse=False) @@ -4825,18 +4801,16 @@ def test_from_dataframe_non_unique_columns(self) -> None: def test_convert_dataframe_with_many_types_and_multiindex(self) -> None: # regression test for GH737 - df = pd.DataFrame( - { - "a": list("abc"), - "b": list(range(1, 4)), - "c": np.arange(3, 6).astype("u1"), - "d": np.arange(4.0, 7.0, dtype="float64"), - "e": [True, False, True], - "f": pd.Categorical(list("abc")), - "g": pd.date_range("20130101", periods=3), - "h": pd.date_range("20130101", periods=3, tz="America/New_York"), - } - ) + df = pd.DataFrame({ + "a": list("abc"), + "b": list(range(1, 4)), + "c": np.arange(3, 6).astype("u1"), + "d": np.arange(4.0, 7.0, dtype="float64"), + "e": [True, False, True], + "f": pd.Categorical(list("abc")), + "g": pd.date_range("20130101", periods=3), + "h": pd.date_range("20130101", periods=3, tz="America/New_York"), + }) df.index = pd.MultiIndex.from_product([["a"], range(3)], names=["one", "two"]) roundtripped = Dataset.from_dataframe(df).to_dataframe() # we can't do perfectly, but we should be at least as faithful as @@ -4946,14 +4920,12 @@ def test_to_and_from_dict_with_time_dim(self) -> None: y = np.random.randn(10, 3) t = pd.date_range("20130101", periods=10) lat = [77.7, 83.2, 76] - ds = Dataset( - { - "a": (["t", "lat"], x), - "b": (["t", "lat"], y), - "t": ("t", t), - "lat": ("lat", lat), - } - ) + ds = Dataset({ + "a": (["t", "lat"], x), + "b": (["t", "lat"], y), + "t": ("t", t), + "lat": ("lat", lat), + }) roundtripped = Dataset.from_dict(ds.to_dict()) assert_identical(ds, roundtripped) @@ 
-4968,14 +4940,12 @@ def test_to_and_from_dict_with_nan_nat( t[2] = np.nan lat = [77.7, 83.2, 76] - ds = Dataset( - { - "a": (["t", "lat"], x), - "b": (["t", "lat"], y), - "t": ("t", t), - "lat": ("lat", lat), - } - ) + ds = Dataset({ + "a": (["t", "lat"], x), + "b": (["t", "lat"], y), + "t": ("t", t), + "lat": ("lat", lat), + }) roundtripped = Dataset.from_dict(ds.to_dict(data=data)) assert_identical(ds, roundtripped) @@ -5294,9 +5264,10 @@ def test_where_drop(self) -> None: # 1d multiple variables ds = Dataset({"a": (("x"), [0, 1, 2, 3]), "b": (("x"), [4, 5, 6, 7])}) - expected5 = Dataset( - {"a": (("x"), [np.nan, 1, 2, 3]), "b": (("x"), [4, 5, 6, np.nan])} - ) + expected5 = Dataset({ + "a": (("x"), [np.nan, 1, 2, 3]), + "b": (("x"), [4, 5, 6, np.nan]), + }) actual5 = ds.where((ds > 0) & (ds < 7), drop=True) assert_identical(expected5, actual5) @@ -5323,33 +5294,28 @@ def test_where_drop(self) -> None: assert_identical(expected7, actual7) # 2d multiple variables - ds = Dataset( - {"a": (("x", "y"), [[0, 1], [2, 3]]), "b": (("x", "y"), [[4, 5], [6, 7]])} - ) - expected8 = Dataset( - { - "a": (("x", "y"), [[np.nan, 1], [2, 3]]), - "b": (("x", "y"), [[4, 5], [6, 7]]), - } - ) + ds = Dataset({ + "a": (("x", "y"), [[0, 1], [2, 3]]), + "b": (("x", "y"), [[4, 5], [6, 7]]), + }) + expected8 = Dataset({ + "a": (("x", "y"), [[np.nan, 1], [2, 3]]), + "b": (("x", "y"), [[4, 5], [6, 7]]), + }) actual8 = ds.where(ds > 0, drop=True) assert_identical(expected8, actual8) # mixed dimensions: PR#6690, Issue#6227 - ds = xr.Dataset( - { - "a": ("x", [1, 2, 3]), - "b": ("y", [2, 3, 4]), - "c": (("x", "y"), np.arange(9).reshape((3, 3))), - } - ) - expected9 = xr.Dataset( - { - "a": ("x", [np.nan, 3]), - "b": ("y", [np.nan, 3, 4]), - "c": (("x", "y"), np.arange(3.0, 9.0).reshape((2, 3))), - } - ) + ds = xr.Dataset({ + "a": ("x", [1, 2, 3]), + "b": ("y", [2, 3, 4]), + "c": (("x", "y"), np.arange(9).reshape((3, 3))), + }) + expected9 = xr.Dataset({ + "a": ("x", [np.nan, 3]), + "b": ("y", [np.nan, 3, 4]), + "c": (("x", "y"), np.arange(3.0, 9.0).reshape((2, 3))), + }) actual9 = ds.where(ds > 2, drop=True) assert actual9.sizes["x"] == 2 assert_identical(expected9, actual9) @@ -5405,16 +5371,15 @@ def test_reduce_coords(self) -> None: assert_identical(actual, expected) def test_mean_uint_dtype(self) -> None: - data = xr.Dataset( - { - "a": (("x", "y"), np.arange(6).reshape(3, 2).astype("uint")), - "b": (("x",), np.array([0.1, 0.2, np.nan])), - } - ) + data = xr.Dataset({ + "a": (("x", "y"), np.arange(6).reshape(3, 2).astype("uint")), + "b": (("x",), np.array([0.1, 0.2, np.nan])), + }) actual = data.mean("x", skipna=True) - expected = xr.Dataset( - {"a": data["a"].mean("x"), "b": data["b"].mean("x", skipna=True)} - ) + expected = xr.Dataset({ + "a": data["a"].mean("x"), + "b": data["b"].mean("x", skipna=True), + }) assert_identical(actual, expected) def test_reduce_bad_dim(self) -> None: @@ -5426,14 +5391,18 @@ def test_reduce_bad_dim(self) -> None: data.mean(dim="bad_dim") def test_reduce_cumsum(self) -> None: - data = xr.Dataset( - {"a": 1, "b": ("x", [1, 2]), "c": (("x", "y"), [[np.nan, 3], [0, 4]])} - ) + data = xr.Dataset({ + "a": 1, + "b": ("x", [1, 2]), + "c": (("x", "y"), [[np.nan, 3], [0, 4]]), + }) assert_identical(data.fillna(0), data.cumsum("y")) - expected = xr.Dataset( - {"a": 1, "b": ("x", [1, 3]), "c": (("x", "y"), [[0, 3], [0, 7]])} - ) + expected = xr.Dataset({ + "a": 1, + "b": ("x", [1, 3]), + "c": (("x", "y"), [[0, 3], [0, 7]]), + }) assert_identical(expected, data.cumsum()) 
@pytest.mark.parametrize( @@ -6185,9 +6154,9 @@ def test_filter_by_attrs(self) -> None: assert not bool(new_ds.data_vars) def test_binary_op_propagate_indexes(self) -> None: - ds = Dataset( - {"d1": DataArray([1, 2, 3], dims=["x"], coords={"x": [10, 20, 30]})} - ) + ds = Dataset({ + "d1": DataArray([1, 2, 3], dims=["x"], coords={"x": [10, 20, 30]}) + }) expected = ds.xindexes["x"] actual = (ds * 2).xindexes["x"] assert expected is actual @@ -6300,32 +6269,26 @@ def test_combine_first(self) -> None: assert_equal(actual, expected) def test_sortby(self) -> None: - ds = Dataset( - { - "A": DataArray( - [[1, 2], [3, 4], [5, 6]], [("x", ["c", "b", "a"]), ("y", [1, 0])] - ), - "B": DataArray([[5, 6], [7, 8], [9, 10]], dims=["x", "y"]), - } - ) + ds = Dataset({ + "A": DataArray( + [[1, 2], [3, 4], [5, 6]], [("x", ["c", "b", "a"]), ("y", [1, 0])] + ), + "B": DataArray([[5, 6], [7, 8], [9, 10]], dims=["x", "y"]), + }) - sorted1d = Dataset( - { - "A": DataArray( - [[5, 6], [3, 4], [1, 2]], [("x", ["a", "b", "c"]), ("y", [1, 0])] - ), - "B": DataArray([[9, 10], [7, 8], [5, 6]], dims=["x", "y"]), - } - ) + sorted1d = Dataset({ + "A": DataArray( + [[5, 6], [3, 4], [1, 2]], [("x", ["a", "b", "c"]), ("y", [1, 0])] + ), + "B": DataArray([[9, 10], [7, 8], [5, 6]], dims=["x", "y"]), + }) - sorted2d = Dataset( - { - "A": DataArray( - [[6, 5], [4, 3], [2, 1]], [("x", ["a", "b", "c"]), ("y", [0, 1])] - ), - "B": DataArray([[10, 9], [8, 7], [6, 5]], dims=["x", "y"]), - } - ) + sorted2d = Dataset({ + "A": DataArray( + [[6, 5], [4, 3], [2, 1]], [("x", ["a", "b", "c"]), ("y", [0, 1])] + ), + "B": DataArray([[10, 9], [8, 7], [6, 5]], dims=["x", "y"]), + }) expected = sorted1d dax = DataArray([100, 99, 98], [("x", ["c", "b", "a"])]) @@ -6370,27 +6333,23 @@ def test_sortby(self) -> None: # test pandas.MultiIndex indices = (("b", 1), ("b", 0), ("a", 1), ("a", 0)) midx = pd.MultiIndex.from_tuples(indices, names=["one", "two"]) - ds_midx = Dataset( - { - "A": DataArray( - [[1, 2], [3, 4], [5, 6], [7, 8]], [("x", midx), ("y", [1, 0])] - ), - "B": DataArray([[5, 6], [7, 8], [9, 10], [11, 12]], dims=["x", "y"]), - } - ) + ds_midx = Dataset({ + "A": DataArray( + [[1, 2], [3, 4], [5, 6], [7, 8]], [("x", midx), ("y", [1, 0])] + ), + "B": DataArray([[5, 6], [7, 8], [9, 10], [11, 12]], dims=["x", "y"]), + }) actual = ds_midx.sortby("x") midx_reversed = pd.MultiIndex.from_tuples( tuple(reversed(indices)), names=["one", "two"] ) - expected = Dataset( - { - "A": DataArray( - [[7, 8], [5, 6], [3, 4], [1, 2]], - [("x", midx_reversed), ("y", [1, 0])], - ), - "B": DataArray([[11, 12], [9, 10], [7, 8], [5, 6]], dims=["x", "y"]), - } - ) + expected = Dataset({ + "A": DataArray( + [[7, 8], [5, 6], [3, 4], [1, 2]], + [("x", midx_reversed), ("y", [1, 0])], + ), + "B": DataArray([[11, 12], [9, 10], [7, 8], [5, 6]], dims=["x", "y"]), + }) assert_equal(actual, expected) # multi-dim sort by coordinate objects diff --git a/xarray/tests/test_distributed.py b/xarray/tests/test_distributed.py index d223bce2098..2be27ab66bd 100644 --- a/xarray/tests/test_distributed.py +++ b/xarray/tests/test_distributed.py @@ -1,4 +1,4 @@ -""" isort:skip_file """ +"""isort:skip_file""" from __future__ import annotations diff --git a/xarray/tests/test_duck_array_ops.py b/xarray/tests/test_duck_array_ops.py index df1ab1f40f9..bd9199bfc87 100644 --- a/xarray/tests/test_duck_array_ops.py +++ b/xarray/tests/test_duck_array_ops.py @@ -46,20 +46,18 @@ class TestOps: @pytest.fixture(autouse=True) def setUp(self): - self.x = array( + self.x = array([ [ - [ - [nan, nan, 2.0, 
nan], - [nan, 5.0, 6.0, nan], - [8.0, 9.0, 10.0, nan], - ], - [ - [nan, 13.0, 14.0, 15.0], - [nan, 17.0, 18.0, nan], - [nan, 21.0, nan, nan], - ], - ] - ) + [nan, nan, 2.0, nan], + [nan, 5.0, 6.0, nan], + [8.0, 9.0, 10.0, nan], + ], + [ + [nan, 13.0, 14.0, 15.0], + [nan, 17.0, 18.0, nan], + [nan, 21.0, nan, nan], + ], + ]) def test_first(self): expected_results = [ @@ -388,12 +386,10 @@ def test_cftime_datetime_mean(dask): def test_mean_over_non_time_dim_of_dataset_with_dask_backed_cftime_data(): # Regression test for part two of GH issue 5897: averaging over a non-time # dimension still fails if the time variable is dask-backed. - ds = Dataset( - { - "var1": (("time",), cftime_range("2021-10-31", periods=10, freq="D")), - "var2": (("x",), list(range(10))), - } - ) + ds = Dataset({ + "var1": (("time",), cftime_range("2021-10-31", periods=10, freq="D")), + "var2": (("x",), list(range(10))), + }) expected = ds.mean("x") result = ds.chunk({}).mean("x") assert_equal(result, expected) @@ -403,22 +399,20 @@ def test_mean_over_non_time_dim_of_dataset_with_dask_backed_cftime_data(): def test_cftime_datetime_mean_long_time_period(): import cftime - times = np.array( + times = np.array([ [ - [ - cftime.DatetimeNoLeap(400, 12, 31, 0, 0, 0, 0), - cftime.DatetimeNoLeap(520, 12, 31, 0, 0, 0, 0), - ], - [ - cftime.DatetimeNoLeap(520, 12, 31, 0, 0, 0, 0), - cftime.DatetimeNoLeap(640, 12, 31, 0, 0, 0, 0), - ], - [ - cftime.DatetimeNoLeap(640, 12, 31, 0, 0, 0, 0), - cftime.DatetimeNoLeap(760, 12, 31, 0, 0, 0, 0), - ], - ] - ) + cftime.DatetimeNoLeap(400, 12, 31, 0, 0, 0, 0), + cftime.DatetimeNoLeap(520, 12, 31, 0, 0, 0, 0), + ], + [ + cftime.DatetimeNoLeap(520, 12, 31, 0, 0, 0, 0), + cftime.DatetimeNoLeap(640, 12, 31, 0, 0, 0, 0), + ], + [ + cftime.DatetimeNoLeap(640, 12, 31, 0, 0, 0, 0), + cftime.DatetimeNoLeap(760, 12, 31, 0, 0, 0, 0), + ], + ]) da = DataArray(times, dims=["time", "d2"]) result = da.mean("d2") @@ -559,9 +553,9 @@ def test_argmin_max(dim_num, dtype, contains_nan, dask, func, skipna, aggdim): with warnings.catch_warnings(): warnings.filterwarnings("ignore", "All-NaN slice") - actual = da.isel( - **{aggdim: getattr(da, "arg" + func)(dim=aggdim, skipna=skipna).compute()} - ) + actual = da.isel(**{ + aggdim: getattr(da, "arg" + func)(dim=aggdim, skipna=skipna).compute() + }) expected = getattr(da, func)(dim=aggdim, skipna=skipna) assert_allclose( actual.drop_vars(list(actual.coords)), diff --git a/xarray/tests/test_formatting.py b/xarray/tests/test_formatting.py index 288a8ed6901..66cbc3b97ac 100644 --- a/xarray/tests/test_formatting.py +++ b/xarray/tests/test_formatting.py @@ -832,12 +832,10 @@ def test_empty_cftimeindex_repr() -> None: def test_display_nbytes() -> None: - xds = xr.Dataset( - { - "foo": np.arange(1200, dtype=np.int16), - "bar": np.arange(111, dtype=np.int16), - } - ) + xds = xr.Dataset({ + "foo": np.arange(1200, dtype=np.int16), + "bar": np.arange(111, dtype=np.int16), + }) # Note: int16 is used to ensure that dtype is shown in the # numpy array representation for all OSes included Windows diff --git a/xarray/tests/test_groupby.py b/xarray/tests/test_groupby.py index d927550e424..2c88aa1e4f2 100644 --- a/xarray/tests/test_groupby.py +++ b/xarray/tests/test_groupby.py @@ -819,14 +819,12 @@ def test_groupby_dataset_errors() -> None: def test_groupby_dataset_reduce() -> None: - data = Dataset( - { - "xy": (["x", "y"], np.random.randn(3, 4)), - "xonly": ("x", np.random.randn(3)), - "yonly": ("y", np.random.randn(4)), - "letters": ("y", ["a", "a", "b", "b"]), - } - ) + data = 
Dataset({ + "xy": (["x", "y"], np.random.randn(3, 4)), + "xonly": ("x", np.random.randn(3)), + "yonly": ("y", np.random.randn(4)), + "letters": ("y", ["a", "a", "b", "b"]), + }) expected = data.mean("y") expected["yonly"] = expected["yonly"].variable.set_dims({"x": 3}) @@ -837,13 +835,11 @@ def test_groupby_dataset_reduce() -> None: assert_allclose(expected, actual) letters = data["letters"] - expected = Dataset( - { - "xy": data["xy"].groupby(letters).mean(...), - "xonly": (data["xonly"].mean().variable.set_dims({"letters": 2})), - "yonly": data["yonly"].groupby(letters).mean(), - } - ) + expected = Dataset({ + "xy": data["xy"].groupby(letters).mean(...), + "xonly": (data["xonly"].mean().variable.set_dims({"letters": 2})), + "yonly": data["yonly"].groupby(letters).mean(), + }) actual = data.groupby("letters").mean(...) assert_allclose(expected, actual) @@ -897,36 +893,30 @@ def test_groupby_math_more() -> None: with pytest.raises(TypeError, match=r"in-place operations"): ds += grouped # type: ignore[arg-type] - ds = Dataset( - { - "x": ("time", np.arange(100)), - "time": pd.date_range("2000-01-01", periods=100), - } - ) + ds = Dataset({ + "x": ("time", np.arange(100)), + "time": pd.date_range("2000-01-01", periods=100), + }) with pytest.raises(ValueError, match=r"incompat.* grouped binary"): ds + ds.groupby("time.month") def test_groupby_math_bitshift() -> None: # create new dataset of int's only - ds = Dataset( - { - "x": ("index", np.ones(4, dtype=int)), - "y": ("index", np.ones(4, dtype=int) * -1), - "level": ("index", [1, 1, 2, 2]), - "index": [0, 1, 2, 3], - } - ) + ds = Dataset({ + "x": ("index", np.ones(4, dtype=int)), + "y": ("index", np.ones(4, dtype=int) * -1), + "level": ("index", [1, 1, 2, 2]), + "index": [0, 1, 2, 3], + }) shift = DataArray([1, 2, 1], [("level", [1, 2, 8])]) - left_expected = Dataset( - { - "x": ("index", [2, 2, 4, 4]), - "y": ("index", [-2, -2, -4, -4]), - "level": ("index", [2, 2, 8, 8]), - "index": [0, 1, 2, 3], - } - ) + left_expected = Dataset({ + "x": ("index", [2, 2, 4, 4]), + "y": ("index", [-2, -2, -4, -4]), + "level": ("index", [2, 2, 8, 8]), + "index": [0, 1, 2, 3], + }) left_manual = [] for lev, group in ds.groupby("level"): @@ -938,14 +928,12 @@ def test_groupby_math_bitshift() -> None: left_actual = (ds.groupby("level") << shift).reset_coords(names="level") assert_equal(left_expected, left_actual) - right_expected = Dataset( - { - "x": ("index", [0, 0, 2, 2]), - "y": ("index", [-1, -1, -2, -2]), - "level": ("index", [0, 0, 4, 4]), - "index": [0, 1, 2, 3], - } - ) + right_expected = Dataset({ + "x": ("index", [0, 0, 2, 2]), + "y": ("index", [-1, -1, -2, -2]), + "level": ("index", [0, 0, 4, 4]), + "index": [0, 1, 2, 3], + }) right_manual = [] for lev, group in left_expected.groupby("level"): shifter = shift.sel(level=lev) @@ -1233,21 +1221,17 @@ def test_groupby_sum(self): array = self.da grouped = array.groupby("abc") - expected_sum_all = Dataset( - { - "foo": Variable( - ["abc"], - np.array( - [ - self.x[:, :9].sum(), - self.x[:, 10:].sum(), - self.x[:, 9:10].sum(), - ] - ).T, - ), - "abc": Variable(["abc"], np.array(["a", "b", "c"])), - } - )["foo"] + expected_sum_all = Dataset({ + "foo": Variable( + ["abc"], + np.array([ + self.x[:, :9].sum(), + self.x[:, 10:].sum(), + self.x[:, 9:10].sum(), + ]).T, + ), + "abc": Variable(["abc"], np.array(["a", "b", "c"])), + })["foo"] assert_allclose(expected_sum_all, grouped.reduce(np.sum, dim=...)) assert_allclose(expected_sum_all, grouped.sum(...)) @@ -1264,21 +1248,17 @@ def test_groupby_sum(self): 
actual = array["y"].groupby("abc").sum(...) assert_allclose(expected, actual) - expected_sum_axis1 = Dataset( - { - "foo": ( - ["x", "abc"], - np.array( - [ - self.x[:, :9].sum(1), - self.x[:, 10:].sum(1), - self.x[:, 9:10].sum(1), - ] - ).T, - ), - "abc": Variable(["abc"], np.array(["a", "b", "c"])), - } - )["foo"] + expected_sum_axis1 = Dataset({ + "foo": ( + ["x", "abc"], + np.array([ + self.x[:, :9].sum(1), + self.x[:, 10:].sum(1), + self.x[:, 9:10].sum(1), + ]).T, + ), + "abc": Variable(["abc"], np.array(["a", "b", "c"])), + })["foo"] assert_allclose(expected_sum_axis1, grouped.reduce(np.sum, "y")) assert_allclose(expected_sum_axis1, grouped.sum("y")) @@ -1288,21 +1268,17 @@ def test_groupby_reductions(self, method): grouped = array.groupby("abc") reduction = getattr(np, method) - expected = Dataset( - { - "foo": Variable( - ["x", "abc"], - np.array( - [ - reduction(self.x[:, :9], axis=-1), - reduction(self.x[:, 10:], axis=-1), - reduction(self.x[:, 9:10], axis=-1), - ] - ).T, - ), - "abc": Variable(["abc"], np.array(["a", "b", "c"])), - } - )["foo"] + expected = Dataset({ + "foo": Variable( + ["x", "abc"], + np.array([ + reduction(self.x[:, :9], axis=-1), + reduction(self.x[:, 10:], axis=-1), + reduction(self.x[:, 9:10], axis=-1), + ]).T, + ), + "abc": Variable(["abc"], np.array(["a", "b", "c"])), + })["foo"] with xr.set_options(use_flox=False): actual_legacy = getattr(grouped, method)(dim="y") @@ -1359,9 +1335,11 @@ def center(x): grouped = array.groupby("abc") expected_ds = array.to_dataset() - exp_data = np.hstack( - [center(self.x[:, :9]), center(self.x[:, 9:10]), center(self.x[:, 10:])] - ) + exp_data = np.hstack([ + center(self.x[:, :9]), + center(self.x[:, 9:10]), + center(self.x[:, 10:]), + ]) expected_ds["foo"] = (["x", "y"], exp_data) expected_centered = expected_ds["foo"] assert_allclose(expected_centered, grouped.map(center)) @@ -1817,9 +1795,11 @@ def test_resample_first(self): # regression test for http://stackoverflow.com/questions/33158558/ array = Dataset({"time": times})["time"] actual = array.resample(time="1D").last() - expected_times = pd.to_datetime( - ["2000-01-01T18", "2000-01-02T18", "2000-01-03T06"] - ) + expected_times = pd.to_datetime([ + "2000-01-01T18", + "2000-01-02T18", + "2000-01-03T06", + ]) expected = DataArray(expected_times, [("time", times[::4])], name="time") assert_identical(expected, actual) @@ -2032,9 +2012,10 @@ def test_upsample_interpolate_bug_2197(self): dates = pd.date_range("2007-02-01", "2007-03-01", freq="D") da = xr.DataArray(np.arange(len(dates)), [("time", dates)]) result = da.resample(time="ME").interpolate("linear") - expected_times = np.array( - [np.datetime64("2007-02-28"), np.datetime64("2007-03-31")] - ) + expected_times = np.array([ + np.datetime64("2007-02-28"), + np.datetime64("2007-03-31"), + ]) expected = xr.DataArray([27.0, np.nan], [("time", expected_times)]) assert_equal(result, expected) @@ -2161,13 +2142,11 @@ def test_resample_invalid_loffset(self) -> None: class TestDatasetResample: def test_resample_and_first(self): times = pd.date_range("2000-01-01", freq="6h", periods=10) - ds = Dataset( - { - "foo": (["time", "x", "y"], np.random.randn(10, 5, 3)), - "bar": ("time", np.random.randn(10), {"meta": "data"}), - "time": times, - } - ) + ds = Dataset({ + "foo": (["time", "x", "y"], np.random.randn(10, 5, 3)), + "bar": ("time", np.random.randn(10), {"meta": "data"}), + "time": times, + }) actual = ds.resample(time="1D").first(keep_attrs=True) expected = ds.isel(time=[0, 4, 8]) @@ -2187,13 +2166,11 @@ def 
test_resample_and_first(self): def test_resample_min_count(self): times = pd.date_range("2000-01-01", freq="6h", periods=10) - ds = Dataset( - { - "foo": (["time", "x", "y"], np.random.randn(10, 5, 3)), - "bar": ("time", np.random.randn(10), {"meta": "data"}), - "time": times, - } - ) + ds = Dataset({ + "foo": (["time", "x", "y"], np.random.randn(10, 5, 3)), + "bar": ("time", np.random.randn(10), {"meta": "data"}), + "time": times, + }) # inject nan ds["foo"] = xr.where(ds["foo"] > 2.0, np.nan, ds["foo"]) @@ -2209,13 +2186,11 @@ def test_resample_min_count(self): def test_resample_by_mean_with_keep_attrs(self): times = pd.date_range("2000-01-01", freq="6h", periods=10) - ds = Dataset( - { - "foo": (["time", "x", "y"], np.random.randn(10, 5, 3)), - "bar": ("time", np.random.randn(10), {"meta": "data"}), - "time": times, - } - ) + ds = Dataset({ + "foo": (["time", "x", "y"], np.random.randn(10, 5, 3)), + "bar": ("time", np.random.randn(10), {"meta": "data"}), + "time": times, + }) ds.attrs["dsmeta"] = "dsdata" resampled_ds = ds.resample(time="1D").mean(keep_attrs=True) @@ -2229,24 +2204,20 @@ def test_resample_by_mean_with_keep_attrs(self): def test_resample_loffset(self): times = pd.date_range("2000-01-01", freq="6h", periods=10) - ds = Dataset( - { - "foo": (["time", "x", "y"], np.random.randn(10, 5, 3)), - "bar": ("time", np.random.randn(10), {"meta": "data"}), - "time": times, - } - ) + ds = Dataset({ + "foo": (["time", "x", "y"], np.random.randn(10, 5, 3)), + "bar": ("time", np.random.randn(10), {"meta": "data"}), + "time": times, + }) ds.attrs["dsmeta"] = "dsdata" def test_resample_by_mean_discarding_attrs(self): times = pd.date_range("2000-01-01", freq="6h", periods=10) - ds = Dataset( - { - "foo": (["time", "x", "y"], np.random.randn(10, 5, 3)), - "bar": ("time", np.random.randn(10), {"meta": "data"}), - "time": times, - } - ) + ds = Dataset({ + "foo": (["time", "x", "y"], np.random.randn(10, 5, 3)), + "bar": ("time", np.random.randn(10), {"meta": "data"}), + "time": times, + }) ds.attrs["dsmeta"] = "dsdata" resampled_ds = ds.resample(time="1D").mean(keep_attrs=False) @@ -2256,13 +2227,11 @@ def test_resample_by_mean_discarding_attrs(self): def test_resample_by_last_discarding_attrs(self): times = pd.date_range("2000-01-01", freq="6h", periods=10) - ds = Dataset( - { - "foo": (["time", "x", "y"], np.random.randn(10, 5, 3)), - "bar": ("time", np.random.randn(10), {"meta": "data"}), - "time": times, - } - ) + ds = Dataset({ + "foo": (["time", "x", "y"], np.random.randn(10, 5, 3)), + "bar": ("time", np.random.randn(10), {"meta": "data"}), + "time": times, + }) ds.attrs["dsmeta"] = "dsdata" resampled_ds = ds.resample(time="1D").last(keep_attrs=False) @@ -2299,13 +2268,11 @@ def test_resample_drop_nondim_coords(self): def test_resample_old_api(self): times = pd.date_range("2000-01-01", freq="6h", periods=10) - ds = Dataset( - { - "foo": (["time", "x", "y"], np.random.randn(10, 5, 3)), - "bar": ("time", np.random.randn(10), {"meta": "data"}), - "time": times, - } - ) + ds = Dataset({ + "foo": (["time", "x", "y"], np.random.randn(10, 5, 3)), + "bar": ("time", np.random.randn(10), {"meta": "data"}), + "time": times, + }) with pytest.raises(TypeError, match=r"resample\(\) no longer supports"): ds.resample("1D", "time") @@ -2318,13 +2285,11 @@ def test_resample_old_api(self): def test_resample_ds_da_are_the_same(self): time = pd.date_range("2000-01-01", freq="6h", periods=365 * 4) - ds = xr.Dataset( - { - "foo": (("time", "x"), np.random.randn(365 * 4, 5)), - "time": time, - "x": np.arange(5), 
- } - ) + ds = xr.Dataset({ + "foo": (("time", "x"), np.random.randn(365 * 4, 5)), + "time": time, + "x": np.arange(5), + }) assert_allclose( ds.resample(time="ME").mean()["foo"], ds.foo.resample(time="ME").mean() ) diff --git a/xarray/tests/test_indexing.py b/xarray/tests/test_indexing.py index 10192b6587a..ba72229709c 100644 --- a/xarray/tests/test_indexing.py +++ b/xarray/tests/test_indexing.py @@ -533,9 +533,10 @@ def test_vectorized_indexer() -> None: check_array1d(indexing.VectorizedIndexer) check_array2d(indexing.VectorizedIndexer) with pytest.raises(ValueError, match=r"numbers of dimensions"): - indexing.VectorizedIndexer( - (np.array(1, dtype=np.int64), np.arange(5, dtype=np.int64)) - ) + indexing.VectorizedIndexer(( + np.array(1, dtype=np.int64), + np.arange(5, dtype=np.int64), + )) class Test_vectorized_indexer: @@ -752,9 +753,11 @@ def test_create_mask_vectorized_indexer() -> None: actual = indexing.create_mask(indexer, (5,)) np.testing.assert_array_equal(expected, actual) - indexer = indexing.VectorizedIndexer( - (np.array([0, -1, 2]), slice(None), np.array([0, 1, -1])) - ) + indexer = indexing.VectorizedIndexer(( + np.array([0, -1, 2]), + slice(None), + np.array([0, 1, -1]), + )) expected = np.array([[False, True, True]] * 2).T actual = indexing.create_mask(indexer, (5, 2)) np.testing.assert_array_equal(expected, actual) @@ -781,9 +784,11 @@ def test_create_mask_dask() -> None: assert actual.chunks == ((1, 1), (2, 1)) np.testing.assert_array_equal(expected, actual) - indexer_vec = indexing.VectorizedIndexer( - (np.array([0, -1, 2]), slice(None), np.array([0, 1, -1])) - ) + indexer_vec = indexing.VectorizedIndexer(( + np.array([0, -1, 2]), + slice(None), + np.array([0, 1, -1]), + )) expected = np.array([[False, True, True]] * 2).T actual = indexing.create_mask( indexer_vec, (5, 2), da.empty((3, 2), chunks=((3,), (2,))) diff --git a/xarray/tests/test_interp.py b/xarray/tests/test_interp.py index a7644ac9d2b..494114976e6 100644 --- a/xarray/tests/test_interp.py +++ b/xarray/tests/test_interp.py @@ -594,9 +594,10 @@ def test_interp_like() -> None: [ (pd.date_range("2000-01-02", periods=3), [1, 2, 3]), ( - np.array( - [np.datetime64("2000-01-01T12:00"), np.datetime64("2000-01-02T12:00")] - ), + np.array([ + np.datetime64("2000-01-01T12:00"), + np.datetime64("2000-01-02T12:00"), + ]), [0.5, 1.5], ), (["2000-01-01T12:00", "2000-01-02T12:00"], [0.5, 1.5]), @@ -766,9 +767,10 @@ def test_decompose(method: InterpOptions) -> None: assert x_broadcast.ndim == 2 actual = da.interp(x=x_new, y=y_new, method=method).drop_vars(("x", "y")) - expected = da.interp(x=x_broadcast, y=y_broadcast, method=method).drop_vars( - ("x", "y") - ) + expected = da.interp(x=x_broadcast, y=y_broadcast, method=method).drop_vars(( + "x", + "y", + )) assert_allclose(actual, expected) diff --git a/xarray/tests/test_merge.py b/xarray/tests/test_merge.py index c6597d5abb0..e5cfa134168 100644 --- a/xarray/tests/test_merge.py +++ b/xarray/tests/test_merge.py @@ -12,20 +12,23 @@ class TestMergeInternals: def test_broadcast_dimension_size(self): - actual = merge.broadcast_dimension_size( - [xr.Variable("x", [1]), xr.Variable("y", [2, 1])] - ) + actual = merge.broadcast_dimension_size([ + xr.Variable("x", [1]), + xr.Variable("y", [2, 1]), + ]) assert actual == {"x": 1, "y": 2} - actual = merge.broadcast_dimension_size( - [xr.Variable(("x", "y"), [[1, 2]]), xr.Variable("y", [2, 1])] - ) + actual = merge.broadcast_dimension_size([ + xr.Variable(("x", "y"), [[1, 2]]), + xr.Variable("y", [2, 1]), + ]) assert actual == {"x": 1, 
"y": 2} with pytest.raises(ValueError): - merge.broadcast_dimension_size( - [xr.Variable(("x", "y"), [[1, 2]]), xr.Variable("y", [2])] - ) + merge.broadcast_dimension_size([ + xr.Variable(("x", "y"), [[1, 2]]), + xr.Variable("y", [2]), + ]) class TestMergeFunction: diff --git a/xarray/tests/test_namedarray.py b/xarray/tests/test_namedarray.py index 20652f4cc3b..2a3faf32b85 100644 --- a/xarray/tests/test_namedarray.py +++ b/xarray/tests/test_namedarray.py @@ -391,8 +391,7 @@ def _new( dims: _DimsLike | Default = ..., data: duckarray[Any, _DType] = ..., attrs: _AttrsLike | Default = ..., - ) -> Variable[Any, _DType]: - ... + ) -> Variable[Any, _DType]: ... @overload def _new( @@ -400,8 +399,7 @@ def _new( dims: _DimsLike | Default = ..., data: Default = ..., attrs: _AttrsLike | Default = ..., - ) -> Variable[_ShapeType_co, _DType_co]: - ... + ) -> Variable[_ShapeType_co, _DType_co]: ... def _new( self, @@ -454,8 +452,7 @@ def _new( dims: _DimsLike | Default = ..., data: duckarray[Any, _DType] = ..., attrs: _AttrsLike | Default = ..., - ) -> Variable[Any, _DType]: - ... + ) -> Variable[Any, _DType]: ... @overload def _new( @@ -463,8 +460,7 @@ def _new( dims: _DimsLike | Default = ..., data: Default = ..., attrs: _AttrsLike | Default = ..., - ) -> Variable[_ShapeType_co, _DType_co]: - ... + ) -> Variable[_ShapeType_co, _DType_co]: ... def _new( self, diff --git a/xarray/tests/test_plot.py b/xarray/tests/test_plot.py index 6f983a121fe..ce25462796e 100644 --- a/xarray/tests/test_plot.py +++ b/xarray/tests/test_plot.py @@ -580,22 +580,18 @@ def test__infer_interval_breaks_logscale_invalid_coords(self) -> None: def test_geo_data(self) -> None: # Regression test for gh2250 # Realistic coordinates taken from the example dataset - lat = np.array( - [ - [16.28, 18.48, 19.58, 19.54, 18.35], - [28.07, 30.52, 31.73, 31.68, 30.37], - [39.65, 42.27, 43.56, 43.51, 42.11], - [50.52, 53.22, 54.55, 54.50, 53.06], - ] - ) - lon = np.array( - [ - [-126.13, -113.69, -100.92, -88.04, -75.29], - [-129.27, -115.62, -101.54, -87.32, -73.26], - [-133.10, -118.00, -102.31, -86.42, -70.76], - [-137.85, -120.99, -103.28, -85.28, -67.62], - ] - ) + lat = np.array([ + [16.28, 18.48, 19.58, 19.54, 18.35], + [28.07, 30.52, 31.73, 31.68, 30.37], + [39.65, 42.27, 43.56, 43.51, 42.11], + [50.52, 53.22, 54.55, 54.50, 53.06], + ]) + lon = np.array([ + [-126.13, -113.69, -100.92, -88.04, -75.29], + [-129.27, -115.62, -101.54, -87.32, -73.26], + [-133.10, -118.00, -102.31, -86.42, -70.76], + [-137.85, -120.99, -103.28, -85.28, -67.62], + ]) data = np.sqrt(lon**2 + lat**2) da = DataArray( data, @@ -1687,9 +1683,9 @@ def test_cmap_and_color_both(self) -> None: def test_2d_coord_with_interval(self) -> None: for dim in self.darray.dims: - gp = self.darray.groupby_bins(dim, range(15), restore_coord_dims=True).mean( - [dim] - ) + gp = self.darray.groupby_bins(dim, range(15), restore_coord_dims=True).mean([ + dim + ]) for kind in ["imshow", "pcolormesh", "contourf", "contour"]: getattr(gp.plot, kind)() @@ -2411,12 +2407,12 @@ class TestFacetGrid4d(PlotTestCase): def setUp(self) -> None: a = easy_array((10, 15, 3, 2)) darray = DataArray(a, dims=["y", "x", "col", "row"]) - darray.coords["col"] = np.array( - ["col" + str(x) for x in darray.coords["col"].values] - ) - darray.coords["row"] = np.array( - ["row" + str(x) for x in darray.coords["row"].values] - ) + darray.coords["col"] = np.array([ + "col" + str(x) for x in darray.coords["col"].values + ]) + darray.coords["row"] = np.array([ + "row" + str(x) for x in darray.coords["row"].values + 
]) self.darray = darray diff --git a/xarray/tests/test_plugins.py b/xarray/tests/test_plugins.py index b518c973d3a..c3badd9b6cb 100644 --- a/xarray/tests/test_plugins.py +++ b/xarray/tests/test_plugins.py @@ -163,16 +163,10 @@ def test_build_engines() -> None: mock.MagicMock(return_value=DummyBackendEntrypoint1), ) def test_build_engines_sorted() -> None: - dummy_pkg_entrypoints = EntryPoints( - [ - EntryPoint( - "dummy2", "xarray.tests.test_plugins:backend_1", "xarray.backends" - ), - EntryPoint( - "dummy1", "xarray.tests.test_plugins:backend_1", "xarray.backends" - ), - ] - ) + dummy_pkg_entrypoints = EntryPoints([ + EntryPoint("dummy2", "xarray.tests.test_plugins:backend_1", "xarray.backends"), + EntryPoint("dummy1", "xarray.tests.test_plugins:backend_1", "xarray.backends"), + ]) backend_entrypoints = list(plugins.build_engines(dummy_pkg_entrypoints)) indices = [] diff --git a/xarray/tests/test_rolling.py b/xarray/tests/test_rolling.py index 79a5ba0a667..cbafb9398a0 100644 --- a/xarray/tests/test_rolling.py +++ b/xarray/tests/test_rolling.py @@ -339,9 +339,10 @@ def test_ndrolling_reduce( if name in ["mean"]: # test our reimplementation of nanmean using np.nanmean - expected = getattr(rolling_obj.construct({"time": "tw", "x": "xw"}), name)( - ["tw", "xw"] - ) + expected = getattr(rolling_obj.construct({"time": "tw", "x": "xw"}), name)([ + "tw", + "xw", + ]) count = rolling_obj.count() if min_periods is None: min_periods = 1 @@ -632,13 +633,11 @@ def test_rolling_wrapped_bottleneck( @pytest.mark.parametrize("min_periods", (None, 1, 2, 3)) @pytest.mark.parametrize("window", (1, 2, 3, 4)) def test_rolling_pandas_compat(self, center, window, min_periods) -> None: - df = pd.DataFrame( - { - "x": np.random.randn(20), - "y": np.random.randn(20), - "time": np.linspace(0, 1, 20), - } - ) + df = pd.DataFrame({ + "x": np.random.randn(20), + "y": np.random.randn(20), + "time": np.linspace(0, 1, 20), + }) ds = Dataset.from_dataframe(df) if min_periods is not None and window < min_periods: @@ -655,13 +654,11 @@ def test_rolling_pandas_compat(self, center, window, min_periods) -> None: @pytest.mark.parametrize("center", (True, False)) @pytest.mark.parametrize("window", (1, 2, 3, 4)) def test_rolling_construct(self, center: bool, window: int) -> None: - df = pd.DataFrame( - { - "x": np.random.randn(20), - "y": np.random.randn(20), - "time": np.linspace(0, 1, 20), - } - ) + df = pd.DataFrame({ + "x": np.random.randn(20), + "y": np.random.randn(20), + "time": np.linspace(0, 1, 20), + }) ds = Dataset.from_dataframe(df) df_rolling = df.rolling(window, center=center, min_periods=1).mean() @@ -681,13 +678,11 @@ def test_rolling_construct(self, center: bool, window: int) -> None: @pytest.mark.parametrize("center", (True, False)) @pytest.mark.parametrize("window", (1, 2, 3, 4)) def test_rolling_construct_stride(self, center: bool, window: int) -> None: - df = pd.DataFrame( - { - "x": np.random.randn(20), - "y": np.random.randn(20), - "time": np.linspace(0, 1, 20), - } - ) + df = pd.DataFrame({ + "x": np.random.randn(20), + "y": np.random.randn(20), + "time": np.linspace(0, 1, 20), + }) ds = Dataset.from_dataframe(df) df_rolling_mean = df.rolling(window, center=center, min_periods=1).mean() diff --git a/xarray/tests/test_units.py b/xarray/tests/test_units.py index 2f11fe688b7..71cbfb8ac4f 100644 --- a/xarray/tests/test_units.py +++ b/xarray/tests/test_units.py @@ -780,9 +780,10 @@ def test_combine_by_coords(variant, unit, error, dtype): units = extract_units(ds) expected = attach_units( - xr.combine_by_coords( 
- [strip_units(ds), strip_units(convert_units(other, units))] - ), + xr.combine_by_coords([ + strip_units(ds), + strip_units(convert_units(other, units)), + ]), units, ) actual = xr.combine_by_coords([ds, other]) @@ -883,12 +884,10 @@ def test_combine_nested(variant, unit, error, dtype): units = extract_units(ds1) convert_and_strip = lambda ds: strip_units(convert_units(ds, units)) expected = attach_units( - func( - [ - [strip_units(ds1), convert_and_strip(ds2)], - [convert_and_strip(ds3), convert_and_strip(ds4)], - ] - ), + func([ + [strip_units(ds1), convert_and_strip(ds2)], + [convert_and_strip(ds3), convert_and_strip(ds4)], + ]), units, ) actual = func([[ds1, ds2], [ds3, ds4]]) @@ -1123,9 +1122,11 @@ def test_merge_dataarray(variant, unit, error, dtype): convert_and_strip = lambda arr: strip_units(convert_units(arr, units)) expected = attach_units( - xr.merge( - [convert_and_strip(arr1), convert_and_strip(arr2), convert_and_strip(arr3)] - ), + xr.merge([ + convert_and_strip(arr1), + convert_and_strip(arr2), + convert_and_strip(arr3), + ]), units, ) @@ -1687,14 +1688,12 @@ def test_raw_numpy_methods(self, func, unit, error, dtype): ) def test_missing_value_detection(self, func): array = ( - np.array( - [ - [1.4, 2.3, np.nan, 7.2], - [np.nan, 9.7, np.nan, np.nan], - [2.1, np.nan, np.nan, 4.6], - [9.9, np.nan, 7.2, 9.1], - ] - ) + np.array([ + [1.4, 2.3, np.nan, 7.2], + [np.nan, 9.7, np.nan, np.nan], + [2.1, np.nan, np.nan, 4.6], + [9.9, np.nan, 7.2, 9.1], + ]) * unit_registry.degK ) variable = xr.Variable(("x", "y"), array) @@ -1720,14 +1719,12 @@ def test_missing_value_detection(self, func): def test_missing_value_fillna(self, unit, error): value = 10 array = ( - np.array( - [ - [1.4, 2.3, np.nan, 7.2], - [np.nan, 9.7, np.nan, np.nan], - [2.1, np.nan, np.nan, 4.6], - [9.9, np.nan, 7.2, 9.1], - ] - ) + np.array([ + [1.4, 2.3, np.nan, 7.2], + [np.nan, 9.7, np.nan, np.nan], + [2.1, np.nan, np.nan, 4.6], + [9.9, np.nan, 7.2, 9.1], + ]) * unit_registry.m ) variable = xr.Variable(("x", "y"), array) @@ -2184,14 +2181,12 @@ def test_copy(self, dtype): def test_no_conflicts(self, unit, dtype): base_unit = unit_registry.m array1 = ( - np.array( - [ - [6.3, 0.3, 0.45], - [np.nan, 0.3, 0.3], - [3.7, np.nan, 0.2], - [9.43, 0.3, 0.7], - ] - ) + np.array([ + [6.3, 0.3, 0.45], + [np.nan, 0.3, 0.3], + [3.7, np.nan, 0.2], + [9.43, 0.3, 0.7], + ]) * base_unit ) array2 = np.array([np.nan, 0.3, np.nan]) * unit @@ -2744,14 +2739,12 @@ def test_numpy_methods_with_args(self, func, unit, error, dtype): ) def test_missing_value_detection(self, func, dtype): array = ( - np.array( - [ - [1.4, 2.3, np.nan, 7.2], - [np.nan, 9.7, np.nan, np.nan], - [2.1, np.nan, np.nan, 4.6], - [9.9, np.nan, 7.2, 9.1], - ] - ) + np.array([ + [1.4, 2.3, np.nan, 7.2], + [np.nan, 9.7, np.nan, np.nan], + [2.1, np.nan, np.nan, 4.6], + [9.9, np.nan, 7.2, 9.1], + ]) * unit_registry.degK ) data_array = xr.DataArray(data=array) @@ -4214,26 +4207,22 @@ def test_numpy_methods_with_args(self, func, unit, error, dtype): ) def test_missing_value_detection(self, func, dtype): array1 = ( - np.array( - [ - [1.4, 2.3, np.nan, 7.2], - [np.nan, 9.7, np.nan, np.nan], - [2.1, np.nan, np.nan, 4.6], - [9.9, np.nan, 7.2, 9.1], - ] - ) + np.array([ + [1.4, 2.3, np.nan, 7.2], + [np.nan, 9.7, np.nan, np.nan], + [2.1, np.nan, np.nan, 4.6], + [9.9, np.nan, 7.2, 9.1], + ]) * unit_registry.degK ) array2 = ( - np.array( - [ - [np.nan, 5.7, 12.0, 7.2], - [np.nan, 12.4, np.nan, 4.2], - [9.8, np.nan, 4.6, 1.4], - [7.2, np.nan, 6.3, np.nan], - [8.4, 3.9, np.nan, 
np.nan], - ] - ) + np.array([ + [np.nan, 5.7, 12.0, 7.2], + [np.nan, 12.4, np.nan, 4.2], + [9.8, np.nan, 4.6, 1.4], + [7.2, np.nan, 6.3, np.nan], + [8.4, 3.9, np.nan, np.nan], + ]) * unit_registry.Pa ) diff --git a/xarray/util/generate_aggregations.py b/xarray/util/generate_aggregations.py index 3462af28663..34b98d61f44 100644 --- a/xarray/util/generate_aggregations.py +++ b/xarray/util/generate_aggregations.py @@ -357,9 +357,9 @@ def generate_method(self, method): ) if method.extra_kwargs: - extra_kwargs = "\n " + "\n ".join( - [kwarg.kwarg for kwarg in method.extra_kwargs if kwarg.kwarg] - ) + extra_kwargs = "\n " + "\n ".join([ + kwarg.kwarg for kwarg in method.extra_kwargs if kwarg.kwarg + ]) else: extra_kwargs = "" diff --git a/xarray/util/print_versions.py b/xarray/util/print_versions.py index 4c715437588..c11bd8b8c30 100755 --- a/xarray/util/print_versions.py +++ b/xarray/util/print_versions.py @@ -39,21 +39,19 @@ def get_sys_info(): try: (sysname, _nodename, release, _version, machine, processor) = platform.uname() - blob.extend( - [ - ("python", sys.version), - ("python-bits", struct.calcsize("P") * 8), - ("OS", f"{sysname}"), - ("OS-release", f"{release}"), - # ("Version", f"{version}"), - ("machine", f"{machine}"), - ("processor", f"{processor}"), - ("byteorder", f"{sys.byteorder}"), - ("LC_ALL", f'{os.environ.get("LC_ALL", "None")}'), - ("LANG", f'{os.environ.get("LANG", "None")}'), - ("LOCALE", f"{locale.getlocale()}"), - ] - ) + blob.extend([ + ("python", sys.version), + ("python-bits", struct.calcsize("P") * 8), + ("OS", f"{sysname}"), + ("OS-release", f"{release}"), + # ("Version", f"{version}"), + ("machine", f"{machine}"), + ("processor", f"{processor}"), + ("byteorder", f"{sys.byteorder}"), + ("LC_ALL", f'{os.environ.get("LC_ALL", "None")}'), + ("LANG", f'{os.environ.get("LANG", "None")}'), + ("LOCALE", f"{locale.getlocale()}"), + ]) except Exception: pass