diff --git a/pyproject.toml b/pyproject.toml
index 9ba2bf33c2b..77f63426216 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -245,6 +245,7 @@ ignore = [
   "E501",
   "E731",
   "UP007",
+  "PERF20",
   "RUF001",
   "RUF002",
   "RUF003",
@@ -259,6 +260,7 @@ extend-select = [
   "W",
   "TID", # flake8-tidy-imports (absolute imports)
   "I", # isort
+  "PERF", # Perflint
   "PGH", # pygrep-hooks
   "RUF",
   "UP", # Pyupgrade
diff --git a/xarray/backends/memory.py b/xarray/backends/memory.py
index 9df6701d954..aba767ab731 100644
--- a/xarray/backends/memory.py
+++ b/xarray/backends/memory.py
@@ -27,11 +27,7 @@ def get_variables(self):
         return self._variables

     def get_dimensions(self):
-        dims = {}
-        for v in self._variables.values():
-            for d, s in v.dims.items():
-                dims[d] = s
-        return dims
+        return {d: s for v in self._variables.values() for d, s in v.dims.items()}

     def prepare_variable(self, k, v, *args, **kwargs):
         new_var = Variable(v.dims, np.empty_like(v), v.attrs)
diff --git a/xarray/backends/plugins.py b/xarray/backends/plugins.py
index e1ad6c5c4a3..483ce6c425e 100644
--- a/xarray/backends/plugins.py
+++ b/xarray/backends/plugins.py
@@ -82,7 +82,7 @@ def backends_dict_from_pkg(
 def set_missing_parameters(
     backend_entrypoints: dict[str, type[BackendEntrypoint]],
 ) -> None:
-    for _, backend in backend_entrypoints.items():
+    for backend in backend_entrypoints.values():
         if backend.open_dataset_parameters is None:
             open_dataset = backend.open_dataset
             backend.open_dataset_parameters = detect_parameters(open_dataset)
diff --git a/xarray/core/coordinates.py b/xarray/core/coordinates.py
index 29ce5eceb28..4d59a7e94c1 100644
--- a/xarray/core/coordinates.py
+++ b/xarray/core/coordinates.py
@@ -752,7 +752,7 @@ def _update_coords(
         # check for inconsistent state *before* modifying anything in-place
         dims = calculate_dimensions(variables)
         new_coord_names = set(coords)
-        for dim, _size in dims.items():
+        for dim in dims.keys():
             if dim in variables:
                 new_coord_names.add(dim)
diff --git a/xarray/core/dataset.py b/xarray/core/dataset.py
index 889384bc068..674fb886fab 100644
--- a/xarray/core/dataset.py
+++ b/xarray/core/dataset.py
@@ -5606,7 +5606,7 @@ def _unstack_once(
         new_indexes, clean_index = index.unstack()
         indexes.update(new_indexes)

-        for _name, idx in new_indexes.items():
+        for idx in new_indexes.values():
             variables.update(idx.create_variables(index_vars))

         for name, var in self.variables.items():
@@ -5647,7 +5647,7 @@ def _unstack_full_reindex(
         indexes.update(new_indexes)

         new_index_variables = {}
-        for _name, idx in new_indexes.items():
+        for idx in new_indexes.values():
             new_index_variables.update(idx.create_variables(index_vars))

         new_dim_sizes = {k: v.size for k, v in new_index_variables.items()}
@@ -9364,10 +9364,11 @@ def pad(
         # keep indexes that won't be affected by pad and drop all other indexes
         xindexes = self.xindexes
         pad_dims = set(pad_width)
-        indexes = {}
-        for k, idx in xindexes.items():
-            if not pad_dims.intersection(xindexes.get_all_dims(k)):
-                indexes[k] = idx
+        indexes = {
+            k: idx
+            for k, idx in xindexes.items()
+            if not pad_dims.intersection(xindexes.get_all_dims(k))
+        }

         for name, var in self.variables.items():
             var_pad_width = {k: v for k, v in pad_width.items() if k in var.dims}
diff --git a/xarray/core/merge.py b/xarray/core/merge.py
index 61c73ca9a7c..6426f741750 100644
--- a/xarray/core/merge.py
+++ b/xarray/core/merge.py
@@ -710,7 +710,7 @@ def merge_core(
     coord_names.intersection_update(variables)
     if explicit_coords is not None:
         coord_names.update(explicit_coords)
-    for dim, _size in dims.items():
+    for dim in dims.keys():
         if dim in variables:
             coord_names.add(dim)
     ambiguous_coords = coord_names.intersection(noncoord_names)
diff --git a/xarray/tests/test_backends.py b/xarray/tests/test_backends.py
index 8aedfb09b3a..7be6eb5ed0d 100644
--- a/xarray/tests/test_backends.py
+++ b/xarray/tests/test_backends.py
@@ -847,7 +847,7 @@ def find_and_validate_array(obj):
             else:
                 raise TypeError(f"{type(obj.array)} is wrapped by {type(obj)}")

-        for _k, v in ds.variables.items():
+        for v in ds.variables.values():
             find_and_validate_array(v._data)

     def test_array_type_after_indexing(self) -> None:
diff --git a/xarray/tests/test_combine.py b/xarray/tests/test_combine.py
index b7170a06128..dfd047e692c 100644
--- a/xarray/tests/test_combine.py
+++ b/xarray/tests/test_combine.py
@@ -29,7 +29,7 @@ def assert_combined_tile_ids_equal(dict1, dict2):
     assert len(dict1) == len(dict2)
-    for k, _v in dict1.items():
+    for k in dict1.keys():
         assert k in dict2.keys()
         assert_equal(dict1[k], dict2[k])
diff --git a/xarray/tests/test_concat.py b/xarray/tests/test_concat.py
index 226f376b581..32ebb0760c0 100644
--- a/xarray/tests/test_concat.py
+++ b/xarray/tests/test_concat.py
@@ -74,40 +74,38 @@ def create_typed_datasets(
     num_datasets: int = 2, seed: int | None = None
 ) -> list[Dataset]:
     var_strings = ["a", "b", "c", "d", "e", "f", "g", "h"]
-    result = []
     rng = np.random.default_rng(seed)
     lat = rng.standard_normal(size=(1, 4))
     lon = rng.standard_normal(size=(1, 4))
-    for i in range(num_datasets):
-        result.append(
-            Dataset(
-                data_vars={
-                    "float": (["x", "y", "day"], rng.standard_normal(size=(1, 4, 2))),
-                    "float2": (["x", "y", "day"], rng.standard_normal(size=(1, 4, 2))),
-                    "string": (
-                        ["x", "y", "day"],
-                        rng.choice(var_strings, size=(1, 4, 2)),
-                    ),
-                    "int": (["x", "y", "day"], rng.integers(0, 10, size=(1, 4, 2))),
-                    "datetime64": (
-                        ["x", "y", "day"],
-                        np.arange(
-                            np.datetime64("2017-01-01"), np.datetime64("2017-01-09")
-                        ).reshape(1, 4, 2),
-                    ),
-                    "timedelta64": (
-                        ["x", "y", "day"],
-                        np.reshape([pd.Timedelta(days=i) for i in range(8)], [1, 4, 2]),
-                    ),
-                },
-                coords={
-                    "lat": (["x", "y"], lat),
-                    "lon": (["x", "y"], lon),
-                    "day": ["day" + str(i * 2 + 1), "day" + str(i * 2 + 2)],
-                },
-            )
+    return [
+        Dataset(
+            data_vars={
+                "float": (["x", "y", "day"], rng.standard_normal(size=(1, 4, 2))),
+                "float2": (["x", "y", "day"], rng.standard_normal(size=(1, 4, 2))),
+                "string": (
+                    ["x", "y", "day"],
+                    rng.choice(var_strings, size=(1, 4, 2)),
+                ),
+                "int": (["x", "y", "day"], rng.integers(0, 10, size=(1, 4, 2))),
+                "datetime64": (
+                    ["x", "y", "day"],
+                    np.arange(
+                        np.datetime64("2017-01-01"), np.datetime64("2017-01-09")
+                    ).reshape(1, 4, 2),
+                ),
+                "timedelta64": (
+                    ["x", "y", "day"],
+                    np.reshape([pd.Timedelta(days=i) for i in range(8)], [1, 4, 2]),
+                ),
+            },
+            coords={
+                "lat": (["x", "y"], lat),
+                "lon": (["x", "y"], lon),
+                "day": ["day" + str(i * 2 + 1), "day" + str(i * 2 + 2)],
+            },
         )
-    return result
+        for i in range(num_datasets)
+    ]


 def test_concat_compat() -> None:
diff --git a/xarray/tests/test_dataset.py b/xarray/tests/test_dataset.py
index 6b582fc8ea0..d8c4a83b671 100644
--- a/xarray/tests/test_dataset.py
+++ b/xarray/tests/test_dataset.py
@@ -3036,12 +3036,12 @@ def test_drop_encoding(self) -> None:
         vencoding = {"scale_factor": 10}
         orig.encoding = {"foo": "bar"}

-        for k, _v in orig.variables.items():
+        for k in orig.variables.keys():
             orig[k].encoding = vencoding

         actual = orig.drop_encoding()
         assert actual.encoding == {}
-        for _k, v in actual.variables.items():
+        for v in actual.variables.values():
             assert v.encoding == {}

         assert_equal(actual, orig)
diff --git a/xarray/tests/test_duck_array_ops.py b/xarray/tests/test_duck_array_ops.py
index da263f1b30e..f9d0141ead3 100644
--- a/xarray/tests/test_duck_array_ops.py
+++ b/xarray/tests/test_duck_array_ops.py
@@ -390,12 +390,13 @@ def series_reduce(da, func, dim, **kwargs):
         se = da.to_series()
         return from_series_or_scalar(getattr(se, func)(**kwargs))
     else:
-        da1 = []
         dims = list(da.dims)
         dims.remove(dim)
         d = dims[0]
-        for i in range(len(da[d])):
-            da1.append(series_reduce(da.isel(**{d: i}), func, dim, **kwargs))
+        da1 = [
+            series_reduce(da.isel(**{d: i}), func, dim, **kwargs)
+            for i in range(len(da[d]))
+        ]

         if d in da.coords:
             return concat(da1, dim=da[d])
diff --git a/xarray/tests/test_plot.py b/xarray/tests/test_plot.py
index 7b6741df72f..b2094d7f627 100644
--- a/xarray/tests/test_plot.py
+++ b/xarray/tests/test_plot.py
@@ -122,11 +122,11 @@ def property_in_axes_text(
     has the property assigned to property_str
     """
     alltxt: list[mpl.text.Text] = ax.findobj(mpl.text.Text)  # type: ignore[assignment]
-    check = []
-    for t in alltxt:
-        if t.get_text() == target_txt:
-            check.append(plt.getp(t, property) == property_str)
-    return all(check)
+    return all(
+        plt.getp(t, property) == property_str
+        for t in alltxt
+        if t.get_text() == target_txt
+    )


 def easy_array(shape: tuple[int, ...], start: float = 0, stop: float = 1) -> np.ndarray:
diff --git a/xarray/tests/test_strategies.py b/xarray/tests/test_strategies.py
index 397e07a4bea..169f8b199ab 100644
--- a/xarray/tests/test_strategies.py
+++ b/xarray/tests/test_strategies.py
@@ -73,7 +73,7 @@ def test_restrict_names(self, data):

 def check_dict_values(dictionary: dict, allowed_attrs_values_types) -> bool:
     """Helper function to assert that all values in recursive dict match one of a set of types."""
-    for _key, value in dictionary.items():
+    for value in dictionary.values():
         if isinstance(value, allowed_attrs_values_types) or value is None:
             continue
         elif isinstance(value, dict):
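For reference, the rewrites above all follow a couple of recurring Perflint patterns. A minimal sketch with made-up data (the dict and variable names below are hypothetical and do not come from the xarray codebase):

```python
# Illustrative sketch only: a hypothetical dict of dimension sizes.
sizes = {"x": 4, "y": 3, "time": 10}

# PERF102: iterate over .keys() / .values() directly when the other half of
# .items() is unused, e.g. "for dim, _size in dims.items()" -> "for dim in dims.keys()".
dims = list(sizes.keys())
total = sum(sizes.values())

# PERF401: build lists with a comprehension instead of appending inside a loop
# (the pattern replaced in create_typed_datasets and series_reduce above).
squares = [size**2 for size in sizes.values()]

# The same idea applied to dicts replaces "dims = {}; for ...: dims[d] = s" loops
# (the pattern replaced in get_dimensions and Dataset.pad above).
doubled = {dim: size * 2 for dim, size in sizes.items()}

print(dims, total, squares, doubled)
```

Running `ruff check --select PERF` over the old-style loops reports these codes directly; the new `extend-select` entry enables them repository-wide, with `PERF20` added to the ignore list as shown in the pyproject.toml hunk.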