Add more ruff rules #2460

Merged 14 commits on Nov 5, 2024
Changes from all commits
11 changes: 9 additions & 2 deletions pyproject.toml
@@ -269,19 +269,25 @@ extend-exclude = [
 extend-select = [
     "ANN", # flake8-annotations
     "B", # flake8-bugbear
+    "EXE", # flake8-executable
     "C4", # flake8-comprehensions
     "FA", # flake8-future-annotations
+    "FLY", # flynt
+    "FURB", # refurb
     "G", # flake8-logging-format
     "I", # isort
     "ISC", # flake8-implicit-str-concat
+    "LOG", # flake8-logging
     "PERF", # Perflint
+    "PIE", # flake8-pie
     "PGH", # pygrep-hooks
     "PT", # flake8-pytest-style
     "PYI", # flake8-pyi
-    "RSE", # flake8-raise
     "RET", # flake8-return
+    "RSE", # flake8-raise
     "RUF",
     "SIM", # flake8-simplify
+    "SLOT", # flake8-slots
     "TCH", # flake8-type-checking
     "TRY", # tryceratops
     "UP", # pyupgrade
@@ -298,6 +304,7 @@ ignore = [
     "RET505",
     "RET506",
     "RUF005",
+    "SIM108",
     "TRY003",
     "UP027", # deprecated
     "UP038", # https://github.com/astral-sh/ruff/issues/7871
@@ -319,7 +326,7 @@ ignore = [
 ]

 [tool.ruff.lint.extend-per-file-ignores]
-"tests/**" = ["ANN001", "ANN201"]
+"tests/**" = ["ANN001", "ANN201", "RUF029", "SIM117", "SIM300"]

 [tool.mypy]
 python_version = "3.11"
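For reference, the rule families enabled above are EXE (flake8-executable), FLY (flynt), FURB (refurb), LOG (flake8-logging), PIE (flake8-pie) and SLOT (flake8-slots). SIM108 joins the ignore list, and the tests/** ignores gain RUF029, SIM117 and SIM300, which silence async functions without await, nested with statements and Yoda-style comparisons in test code (rule descriptions paraphrased from the Ruff docs). Below is a minimal sketch, not taken from this PR and with made-up names, of the kind of code three of the new families flag; the range(0, ...) edits in gzip.py and chunk_grids.py further down look like PIE808 fixes.

```python
def chunk_summary(parts: list[str]) -> str:
    # FLY002 (flynt): joining a static list of strings; an f-string is clearer
    return " ".join(["chunks:", str(len(parts))])


def chunk_indices(n: int) -> range:
    # PIE808 (flake8-pie): the explicit 0 start is redundant; range(n) is equivalent
    return range(0, n)


class ChunkKey(str):
    # SLOT000 (flake8-slots): str subclasses should define __slots__
    pass
```

Running ruff check --select FLY002,PIE808,SLOT000 on a file like this should report all three.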
1 change: 0 additions & 1 deletion src/zarr/abc/codec.py
@@ -106,7 +106,6 @@ def validate(self, *, shape: ChunkCoords, dtype: np.dtype[Any], chunk_grid: Chun
         chunk_grid : ChunkGrid
             The array chunk grid
         """
-        ...

     async def _decode_single(self, chunk_data: CodecOutput, chunk_spec: ArraySpec) -> CodecInput:
         raise NotImplementedError
1 change: 0 additions & 1 deletion src/zarr/abc/metadata.py
@@ -42,6 +42,5 @@ def from_dict(cls, data: dict[str, JSON]) -> Self:
         """
         Create an instance of the model from a dictionary
         """
-        ...

         return cls(**data)
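This deletion and the one in src/zarr/abc/codec.py just above remove a stray `...` next to a docstring or a real body, and the `pass` dropped in tests/test_codecs/test_codecs.py further down is the same cleanup. That is the pattern PIE790 (unnecessary-placeholder) reports; attributing these edits to that exact rule is an inference from the newly enabled PIE family, not something the diff states. A minimal sketch with a hypothetical function:

```python
def describe_chunks() -> None:
    """Explain the chunk layout."""
    ...  # PIE790: the docstring already forms the body, so this placeholder is redundant
```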
5 changes: 2 additions & 3 deletions src/zarr/api/asynchronous.py
@@ -878,9 +878,8 @@ async def create(
         warnings.warn("meta_array is not yet implemented", RuntimeWarning, stacklevel=2)

     mode = kwargs.pop("mode", None)
-    if mode is None:
-        if not isinstance(store, Store | StorePath):
-            mode = "a"
+    if mode is None and not isinstance(store, Store | StorePath):
+        mode = "a"

     store_path = await make_store_path(store, path=path, mode=mode, storage_options=storage_options)

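The rewrite above folds a nested if into a single conjunction; the same shape of change appears in src/zarr/core/indexing.py and src/zarr/core/strings.py below. It matches what SIM102 (collapsible-if) from flake8-simplify asks for, though the diff itself does not name the rule. A generic sketch with made-up names showing that the two forms behave identically:

```python
def resolve_mode(mode: str | None, store: object) -> str | None:
    # Nested form: SIM102 flags the outer/inner pair as collapsible.
    if mode is None:
        if not isinstance(store, (str, bytes)):
            mode = "a"
    return mode


def resolve_mode_collapsed(mode: str | None, store: object) -> str | None:
    # Collapsed form: conditions joined with `and`; short-circuiting keeps behavior the same.
    if mode is None and not isinstance(store, (str, bytes)):
        mode = "a"
    return mode
```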
2 changes: 1 addition & 1 deletion src/zarr/codecs/gzip.py
@@ -21,7 +21,7 @@
 def parse_gzip_level(data: JSON) -> int:
     if not isinstance(data, (int)):
         raise TypeError(f"Expected int, got {type(data)}")
-    if data not in range(0, 10):
+    if data not in range(10):
         raise ValueError(
             f"Expected an integer from the inclusive range (0, 9). Got {data} instead."
         )
2 changes: 1 addition & 1 deletion src/zarr/core/chunk_grids.py
@@ -182,7 +182,7 @@ def to_dict(self) -> dict[str, JSON]:

     def all_chunk_coords(self, array_shape: ChunkCoords) -> Iterator[ChunkCoords]:
         return itertools.product(
-            *(range(0, ceildiv(s, c)) for s, c in zip(array_shape, self.chunk_shape, strict=False))
+            *(range(ceildiv(s, c)) for s, c in zip(array_shape, self.chunk_shape, strict=False))
         )

     def get_nchunks(self, array_shape: ChunkCoords) -> int:
2 changes: 1 addition & 1 deletion src/zarr/core/group.py
@@ -1225,7 +1225,7 @@ def _members_consolidated(

         # we kind of just want the top-level keys.
         if consolidated_metadata is not None:
-            for key in consolidated_metadata.metadata.keys():
+            for key in consolidated_metadata.metadata:
                 obj = self._getitem_consolidated(
                     self.store_path, key, prefix=self.name
                 )  # Metadata -> Group/Array
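Iterating a mapping directly is equivalent to iterating its .keys() view, so the call can be dropped; the tests/test_api.py hunk below is the same cleanup. This is the simplification flake8-simplify describes under SIM118 (in-dict-keys), again inferred rather than stated in the diff. A small self-contained example:

```python
metadata: dict[str, int] = {"zarr_format": 3, "node_count": 2}

# These two loops visit the same keys in the same order;
# the first form avoids the redundant .keys() call.
for key in metadata:
    print(key)

for key in metadata.keys():
    print(key)
```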
11 changes: 6 additions & 5 deletions src/zarr/core/indexing.py
@@ -241,12 +241,13 @@ def is_pure_fancy_indexing(selection: Any, ndim: int) -> bool:
         # is mask selection
         return True

-    if ndim == 1:
-        if is_integer_list(selection) or is_integer_array(selection) or is_bool_list(selection):
-            return True
+    if ndim == 1 and (
+        is_integer_list(selection) or is_integer_array(selection) or is_bool_list(selection)
+    ):
+        return True

-        # if not, we go through the normal path below, because a 1-tuple
-        # of integers is also allowed.
+    # if not, we go through the normal path below, because a 1-tuple
+    # of integers is also allowed.
     no_slicing = (
         isinstance(selection, tuple)
         and len(selection) == ndim
6 changes: 3 additions & 3 deletions src/zarr/core/metadata/__init__.py
@@ -8,10 +8,10 @@
 T_ArrayMetadata = TypeVar("T_ArrayMetadata", ArrayV2Metadata, ArrayV3Metadata)

 __all__ = [
-    "ArrayV2Metadata",
-    "ArrayV3Metadata",
     "ArrayMetadata",
     "ArrayMetadataDict",
-    "ArrayV3MetadataDict",
+    "ArrayV2Metadata",
     "ArrayV2MetadataDict",
+    "ArrayV3Metadata",
+    "ArrayV3MetadataDict",
 ]
6 changes: 3 additions & 3 deletions src/zarr/core/metadata/v3.py
@@ -481,9 +481,9 @@ def parse_fill_value(
     except (ValueError, OverflowError, TypeError) as e:
         raise ValueError(f"fill value {fill_value!r} is not valid for dtype {data_type}") from e
     # Check if the value is still representable by the dtype
-    if fill_value == "NaN" and np.isnan(casted_value):
-        pass
-    elif fill_value in ["Infinity", "-Infinity"] and not np.isfinite(casted_value):
+    if (fill_value == "NaN" and np.isnan(casted_value)) or (
+        fill_value in ["Infinity", "-Infinity"] and not np.isfinite(casted_value)
+    ):
         pass
     elif np_dtype.kind == "f":
         # float comparison is not exact, especially when dtype <float64
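Here two branches with an identical `pass` body are merged by joining their conditions with `or`, which is the rewrite SIM114 (if-with-same-arms) suggests; as with the other hunks, naming the rule is an inference. A minimal sketch with placeholder logic:

```python
import math


def fill_label(fill_value: float) -> str:
    # Two arms with the same body; SIM114 flags this shape.
    if math.isnan(fill_value):
        return "special"
    elif math.isinf(fill_value):
        return "special"
    return "ordinary"


def fill_label_merged(fill_value: float) -> str:
    # Conditions joined with `or`; behavior is unchanged.
    if math.isnan(fill_value) or math.isinf(fill_value):
        return "special"
    return "ordinary"
```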
7 changes: 3 additions & 4 deletions src/zarr/core/strings.py
@@ -61,10 +61,9 @@ def cast_to_string_dtype(
         return cast_array(data)
         # out = data.astype(STRING_DTYPE, copy=False)
         # return cast(np.ndarray[Any, np.dtypes.StringDType | np.dtypes.ObjectDType], out)
-    if _NUMPY_SUPPORTS_VLEN_STRING:
-        if np.issubdtype(data.dtype, _STRING_DTYPE):
-            # already a valid string variable length string dtype
-            return cast_array(data)
+    if _NUMPY_SUPPORTS_VLEN_STRING and np.issubdtype(data.dtype, _STRING_DTYPE):
+        # already a valid string variable length string dtype
+        return cast_array(data)
     if np.issubdtype(data.dtype, np.object_):
         # object arrays require more careful handling
         if _NUMPY_SUPPORTS_VLEN_STRING:
2 changes: 1 addition & 1 deletion tests/test_api.py
@@ -154,7 +154,7 @@ def test_save(store: Store, n_args: int, n_kwargs: int) -> None:
         assert isinstance(group, Group)
         for array in group.array_values():
             assert_array_equal(array[:], data)
-        for k in kwargs.keys():
+        for k in kwargs:
             assert k in group
         assert group.nmembers() == n_args + n_kwargs

1 change: 0 additions & 1 deletion tests/test_codecs/test_codecs.py
@@ -56,7 +56,6 @@ def test_sharding_pickle() -> None:
     """
     Test that sharding codecs can be pickled
     """
-    pass


 @pytest.mark.parametrize("store", ["local", "memory"], indirect=["store"])