Apply and enforce more ruff rules (#2053)
* Apply ruff/Perflint rule PERF102

PERF102 When using only the keys of a dict use the `keys()` method
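
For illustration, a minimal hypothetical sketch of the pattern PERF102 targets (not code from this repository):

counts = {"a": 1, "b": 2}

# Flagged by PERF102: .items() is called but the values are never used.
for key, _value in counts.items():
    print(key)

# Preferred: ask the dict for its keys directly.
for key in counts.keys():
    print(key)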

* Apply ruff/Perflint rule PERF401

PERF401 Use an async list comprehension to create a transformed list
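
A minimal hypothetical sketch of the async case (`list_keys` is an assumed stand-in source, not a zarr API):

from collections.abc import AsyncIterator

async def list_keys() -> AsyncIterator[str]:
    # Stand-in async source of values.
    for key in ("a", "b"):
        yield key

async def collect_before() -> list[str]:
    # Flagged by PERF401: the list is built with repeated append() calls.
    upper = []
    async for key in list_keys():
        upper.append(key.upper())
    return upper

async def collect_after() -> list[str]:
    # Preferred: an async list comprehension.
    return [key.upper() async for key in list_keys()]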

* Apply ruff/flake8-pytest-style rule PT022
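
PT022 flags fixtures that end in a bare `yield` with no teardown code after it; such fixtures can simply `return`. A minimal hypothetical sketch (not code from this repository):

import pytest

@pytest.fixture
def sample_before():
    # Flagged by PT022: nothing runs after the yield, so it is not teardown.
    yield [1, 2, 3]

@pytest.fixture
def sample_after():
    # Preferred: a plain return when the fixture needs no cleanup.
    return [1, 2, 3]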

* Fix pre-commit warning

Ignore lint rules conflicting with the ruff formatter

* Apply ruff/pygrep-hooks rule PGH003

PGH003 Use specific rule codes when ignoring type issues
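
A minimal hypothetical sketch (the `assignment` error code is one plausible choice for this case):

# Flagged by PGH003: a blanket ignore hides every type error on the line.
a: int = "one"  # type: ignore

# Preferred: suppress only the specific mypy error code.
b: int = "two"  # type: ignore[assignment]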

* Apply ruff/pygrep-hooks rule PGH004

PGH004 Use specific rule codes when using `noqa`
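
A minimal hypothetical sketch (not code from this repository):

# Flagged by PGH004: a bare noqa silences every lint rule on the line.
from os import *  # noqa

# Preferred: name the rule being suppressed (F403 is the star-import check).
from sys import *  # noqa: F403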

* Enforce ruff/pygrep-hooks rules (PGH)

* Apply ruff/flake8-comprehensions rule C417

C417 Unnecessary `map` usage (rewrite using a generator expression)
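
The `parse_attributes` change in src/zarr/core/group.py below is one instance; condensed into a hypothetical sketch:

data = {"a": 1, "b": 2}

# Flagged by C417: map() wrapping a lambda.
ok = all(map(lambda k: isinstance(k, str), data.keys()))

# Preferred: a generator expression.
ok = all(isinstance(k, str) for k in data)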

* Apply ruff/flake8-pyi rule PYI032

PYI032 Prefer `object` to `Any` for the second parameter to `__eq__`
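
A minimal hypothetical sketch (not code from this repository):

from typing import Any

class Before:
    def __eq__(self, other: Any) -> bool:  # flagged by PYI032
        return isinstance(other, Before)

class After:
    def __eq__(self, other: object) -> bool:  # preferred
        return isinstance(other, After)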

* Apply ruff/flake8-pyi rule PYI036

PYI036 Star-args in `__exit__` should be annotated with `object`
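
The `__exit__` change in src/zarr/abc/store.py below applies exactly this fix; condensed into a hypothetical sketch:

from types import TracebackType
from typing import Any

class Before:
    def __exit__(self, *args: Any) -> None:  # flagged by PYI036
        pass

class After:
    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc_value: BaseException | None,
        traceback: TracebackType | None,
    ) -> None:
        pass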

* Apply ruff/flake8-pyi rule PYI038

* Apply ruff/flake8-pyi rule PYI041

PYI041 Use `complex` instead of `float | complex`
PYI041 Use `float` instead of `int | float`
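
The `fill_value` change in src/zarr/core/array.py below is one instance; condensed into a hypothetical sketch (for typing purposes, `int` is already accepted wherever `float` is):

# Flagged by PYI041: the int | float union is redundant.
def scale_before(value: int | float) -> float:
    return value * 2.0

# Preferred:
def scale_after(value: float) -> float:
    return value * 2.0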

* Apply ruff/flake8-pyi rule PYI055

PYI055 Multiple `type` members in a union. Combine them into one.
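
A minimal hypothetical sketch (not code from this repository):

# Flagged by PYI055: several type[...] members in one union.
HandlerBefore = type[int] | type[str]

# Preferred: a single type[...] wrapping the union.
HandlerAfter = type[int | str]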
DimitriPapadopoulos authored Sep 24, 2024
1 parent 5e57f75 commit 4cbb17e
Showing 28 changed files with 119 additions and 103 deletions.
28 changes: 23 additions & 5 deletions pyproject.toml
@@ -207,18 +207,36 @@ extend-exclude = [

[tool.ruff.lint]
extend-select = [
"B", # flake8-bugbear
"I", # isort
"ISC",
"UP", # pyupgrade
"RSE",
"B", # flake8-bugbear
"I", # isort
"ISC", # flake8-implicit-str-concat
"PGH", # pygrep-hooks
"PYI", # flake8-pyi
"RSE", # flake8-raise
"RUF",
"TCH", # flake8-type-checking
"TRY", # tryceratops
"UP", # pyupgrade
]
ignore = [
"PYI013",
"RUF005",
"TRY003",
# https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules
"W191",
"E111",
"E114",
"E117",
"D206",
"D300",
"Q000",
"Q001",
"Q002",
"Q003",
"COM812",
"COM819",
"ISC001",
"ISC002",
]

[tool.mypy]
10 changes: 8 additions & 2 deletions src/zarr/abc/store.py
@@ -1,6 +1,7 @@
from abc import ABC, abstractmethod
from asyncio import gather
from collections.abc import AsyncGenerator, Iterable
from types import TracebackType
from typing import Any, NamedTuple, Protocol, runtime_checkable

from typing_extensions import Self
@@ -35,7 +36,7 @@ class Store(ABC):
_mode: AccessMode
_is_open: bool

def __init__(self, mode: AccessModeLiteral = "r", *args: Any, **kwargs: Any):
def __init__(self, mode: AccessModeLiteral = "r", *args: Any, **kwargs: Any) -> None:
self._is_open = False
self._mode = AccessMode.from_literal(mode)

@@ -49,7 +50,12 @@ def __enter__(self) -> Self:
"""Enter a context manager that will close the store upon exiting."""
return self

def __exit__(self, *args: Any) -> None:
def __exit__(
self,
exc_type: type[BaseException] | None,
exc_value: BaseException | None,
traceback: TracebackType | None,
) -> None:
"""Close the store."""
self.close()

10 changes: 5 additions & 5 deletions src/zarr/api/asynchronous.py
@@ -2,7 +2,7 @@

import asyncio
import warnings
from typing import TYPE_CHECKING, Any, Literal, Union, cast
from typing import TYPE_CHECKING, Any, Literal, cast

import numpy as np
import numpy.typing as npt
@@ -25,6 +25,10 @@
from zarr.core.buffer import NDArrayLike
from zarr.core.chunk_key_encodings import ChunkKeyEncoding

# TODO: this type could use some more thought
ArrayLike = AsyncArray | Array | npt.NDArray[Any]
PathLike = str

__all__ = [
"consolidate_metadata",
"copy",
@@ -53,10 +57,6 @@
"zeros_like",
]

# TODO: this type could use some more thought, noqa to avoid "Variable "asynchronous.ArrayLike" is not valid as a type"
ArrayLike = Union[AsyncArray | Array | npt.NDArray[Any]] # noqa
PathLike = str


def _get_shape_chunks(a: ArrayLike | Any) -> tuple[ChunkCoords | None, ChunkCoords | None]:
"""helper function to get the shape and chunks from an array-like object"""
6 changes: 2 additions & 4 deletions src/zarr/codecs/transpose.py
@@ -96,16 +96,14 @@ async def _decode_single(
chunk_spec: ArraySpec,
) -> NDBuffer:
inverse_order = np.argsort(self.order)
chunk_array = chunk_array.transpose(inverse_order)
return chunk_array
return chunk_array.transpose(inverse_order)

async def _encode_single(
self,
chunk_array: NDBuffer,
_chunk_spec: ArraySpec,
) -> NDBuffer | None:
chunk_array = chunk_array.transpose(self.order)
return chunk_array
return chunk_array.transpose(self.order)

def compute_encoded_size(self, input_byte_length: int, _chunk_spec: ArraySpec) -> int:
return input_byte_length
7 changes: 3 additions & 4 deletions src/zarr/core/array.py
@@ -110,7 +110,7 @@ def __init__(
metadata: ArrayMetadata,
store_path: StorePath,
order: Literal["C", "F"] | None = None,
):
) -> None:
metadata_parsed = parse_array_metadata(metadata)
order_parsed = parse_indexing_order(order or config.get("array.order"))

@@ -294,7 +294,7 @@ async def _create_v2(
dtype: npt.DTypeLike,
chunks: ChunkCoords,
dimension_separator: Literal[".", "/"] | None = None,
fill_value: None | int | float = None,
fill_value: None | float = None,
order: Literal["C", "F"] | None = None,
filters: list[dict[str, JSON]] | None = None,
compressor: dict[str, JSON] | None = None,
@@ -331,8 +331,7 @@ def from_dict(
data: dict[str, JSON],
) -> AsyncArray:
metadata = parse_array_metadata(data)
async_array = cls(metadata=metadata, store_path=store_path)
return async_array
return cls(metadata=metadata, store_path=store_path)

@classmethod
async def open(
2 changes: 1 addition & 1 deletion src/zarr/core/attributes.py
@@ -13,7 +13,7 @@


class Attributes(MutableMapping[str, JSON]):
def __init__(self, obj: Array | Group):
def __init__(self, obj: Array | Group) -> None:
# key=".zattrs", read_only=False, cache=True, synchronizer=None
self._obj = obj

6 changes: 3 additions & 3 deletions src/zarr/core/buffer/core.py
@@ -93,7 +93,7 @@ def ravel(self, order: Literal["K", "A", "C", "F"] = ...) -> Self: ...

def all(self) -> bool: ...

def __eq__(self, other: Any) -> Self: # type: ignore[explicit-override, override]
def __eq__(self, other: object) -> Self: # type: ignore[explicit-override, override]
"""Element-wise equal
Notes
@@ -136,7 +136,7 @@ class Buffer(ABC):
array-like object that must be 1-dim, contiguous, and byte dtype.
"""

def __init__(self, array_like: ArrayLike):
def __init__(self, array_like: ArrayLike) -> None:
if array_like.ndim != 1:
raise ValueError("array_like: only 1-dim allowed")
if array_like.dtype != np.dtype("b"):
@@ -313,7 +313,7 @@ class NDBuffer:
ndarray-like object that is convertible to a regular Numpy array.
"""

def __init__(self, array: NDArrayLike):
def __init__(self, array: NDArrayLike) -> None:
# assert array.ndim > 0
assert array.dtype != object
self._data = array
4 changes: 2 additions & 2 deletions src/zarr/core/buffer/cpu.py
@@ -45,7 +45,7 @@ class Buffer(core.Buffer):
array-like object that must be 1-dim, contiguous, and byte dtype.
"""

def __init__(self, array_like: ArrayLike):
def __init__(self, array_like: ArrayLike) -> None:
super().__init__(array_like)

@classmethod
@@ -143,7 +143,7 @@ class NDBuffer(core.NDBuffer):
ndarray-like object that is convertible to a regular Numpy array.
"""

def __init__(self, array: NDArrayLike):
def __init__(self, array: NDArrayLike) -> None:
super().__init__(array)

@classmethod
4 changes: 2 additions & 2 deletions src/zarr/core/buffer/gpu.py
@@ -48,7 +48,7 @@ class Buffer(core.Buffer):
array-like object that must be 1-dim, contiguous, and byte dtype.
"""

def __init__(self, array_like: ArrayLike):
def __init__(self, array_like: ArrayLike) -> None:
if cp is None:
raise ImportError(
"Cannot use zarr.buffer.gpu.Buffer without cupy. Please install cupy."
@@ -137,7 +137,7 @@ class NDBuffer(core.NDBuffer):
ndarray-like object that is convertible to a regular Numpy array.
"""

def __init__(self, array: NDArrayLike):
def __init__(self, array: NDArrayLike) -> None:
if cp is None:
raise ImportError(
"Cannot use zarr.buffer.gpu.NDBuffer without cupy. Please install cupy."
12 changes: 6 additions & 6 deletions src/zarr/core/group.py
@@ -54,7 +54,7 @@ def parse_zarr_format(data: Any) -> ZarrFormat:
def parse_attributes(data: Any) -> dict[str, Any]:
if data is None:
return {}
elif isinstance(data, dict) and all(map(lambda v: isinstance(v, str), data.keys())):
elif isinstance(data, dict) and all(isinstance(k, str) for k in data):
return data
msg = f"Expected dict with string keys. Got {type(data)} instead."
raise TypeError(msg)
@@ -104,7 +104,9 @@ def to_buffer_dict(self, prototype: BufferPrototype) -> dict[str, Buffer]:
),
}

def __init__(self, attributes: dict[str, Any] | None = None, zarr_format: ZarrFormat = 3):
def __init__(
self, attributes: dict[str, Any] | None = None, zarr_format: ZarrFormat = 3
) -> None:
attributes_parsed = parse_attributes(attributes)
zarr_format_parsed = parse_zarr_format(zarr_format)

@@ -202,11 +204,10 @@ def from_dict(
store_path: StorePath,
data: dict[str, Any],
) -> AsyncGroup:
group = cls(
return cls(
metadata=GroupMetadata.from_dict(data),
store_path=store_path,
)
return group

async def getitem(
self,
@@ -888,8 +889,7 @@ def members(self, max_depth: int | None = 0) -> tuple[tuple[str, Array | Group],
"""
_members = self._sync_iter(self._async_group.members(max_depth=max_depth))

result = tuple(map(lambda kv: (kv[0], _parse_async_node(kv[1])), _members))
return result
return tuple((kv[0], _parse_async_node(kv[1])) for kv in _members)

def __contains__(self, member: str) -> bool:
return self._sync(self._async_group.contains(member))
24 changes: 14 additions & 10 deletions src/zarr/core/indexing.py
@@ -54,7 +54,7 @@ class ArrayIndexError(IndexError):
class BoundsCheckError(IndexError):
_msg = ""

def __init__(self, dim_len: int):
def __init__(self, dim_len: int) -> None:
self._msg = f"index out of bounds for dimension with length {dim_len}"


@@ -255,7 +255,7 @@ class IntDimIndexer:
dim_chunk_len: int
nitems: int = 1

def __init__(self, dim_sel: int, dim_len: int, dim_chunk_len: int):
def __init__(self, dim_sel: int, dim_len: int, dim_chunk_len: int) -> None:
object.__setattr__(self, "dim_sel", normalize_integer_selection(dim_sel, dim_len))
object.__setattr__(self, "dim_len", dim_len)
object.__setattr__(self, "dim_chunk_len", dim_chunk_len)
@@ -279,7 +279,7 @@ class SliceDimIndexer:
stop: int
step: int

def __init__(self, dim_sel: slice, dim_len: int, dim_chunk_len: int):
def __init__(self, dim_sel: slice, dim_len: int, dim_chunk_len: int) -> None:
# normalize
start, stop, step = dim_sel.indices(dim_len)
if step < 1:
@@ -453,7 +453,7 @@ def __init__(
selection: BasicSelection,
shape: ChunkCoords,
chunk_grid: ChunkGrid,
):
) -> None:
chunk_shape = get_chunk_shape(chunk_grid)
# handle ellipsis
selection_normalized = replace_ellipsis(selection, shape)
@@ -509,7 +509,7 @@ class BoolArrayDimIndexer:
nitems: int
dim_chunk_ixs: npt.NDArray[np.intp]

def __init__(self, dim_sel: npt.NDArray[np.bool_], dim_len: int, dim_chunk_len: int):
def __init__(self, dim_sel: npt.NDArray[np.bool_], dim_len: int, dim_chunk_len: int) -> None:
# check number of dimensions
if not is_bool_array(dim_sel, 1):
raise IndexError("Boolean arrays in an orthogonal selection must be 1-dimensional only")
@@ -626,7 +626,7 @@ def __init__(
wraparound: bool = True,
boundscheck: bool = True,
order: Order = Order.UNKNOWN,
):
) -> None:
# ensure 1d array
dim_sel = np.asanyarray(dim_sel)
if not is_integer_array(dim_sel, 1):
@@ -766,7 +766,7 @@ class OrthogonalIndexer(Indexer):
is_advanced: bool
drop_axes: tuple[int, ...]

def __init__(self, selection: Selection, shape: ChunkCoords, chunk_grid: ChunkGrid):
def __init__(self, selection: Selection, shape: ChunkCoords, chunk_grid: ChunkGrid) -> None:
chunk_shape = get_chunk_shape(chunk_grid)

# handle ellipsis
@@ -880,7 +880,9 @@ class BlockIndexer(Indexer):
shape: ChunkCoords
drop_axes: ChunkCoords

def __init__(self, selection: BasicSelection, shape: ChunkCoords, chunk_grid: ChunkGrid):
def __init__(
self, selection: BasicSelection, shape: ChunkCoords, chunk_grid: ChunkGrid
) -> None:
chunk_shape = get_chunk_shape(chunk_grid)

# handle ellipsis
@@ -1005,7 +1007,9 @@ class CoordinateIndexer(Indexer):
chunk_shape: ChunkCoords
drop_axes: ChunkCoords

def __init__(self, selection: CoordinateSelection, shape: ChunkCoords, chunk_grid: ChunkGrid):
def __init__(
self, selection: CoordinateSelection, shape: ChunkCoords, chunk_grid: ChunkGrid
) -> None:
chunk_shape = get_chunk_shape(chunk_grid)

cdata_shape: ChunkCoords
@@ -1122,7 +1126,7 @@ def __iter__(self) -> Iterator[ChunkProjection]:

@dataclass(frozen=True)
class MaskIndexer(CoordinateIndexer):
def __init__(self, selection: MaskSelection, shape: ChunkCoords, chunk_grid: ChunkGrid):
def __init__(self, selection: MaskSelection, shape: ChunkCoords, chunk_grid: ChunkGrid) -> None:
# some initial normalization
selection_normalized = cast(tuple[MaskSelection], ensure_tuple(selection))
selection_normalized = cast(tuple[MaskSelection], replace_lists(selection_normalized))