Apply ruff/flake8-pyi rule PYI055
PYI055 Multiple `type` members in a union. Combine them into one.
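
For reference, a minimal sketch of the pattern this rule targets (hypothetical function, not taken from this diff):

    # Flagged by PYI055: multiple `type` members in a single union
    def coerce(kind: type[int] | type[str]) -> object: ...

    # Preferred: one `type` member wrapping the union
    def coerce(kind: type[int | str]) -> object: ...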
DimitriPapadopoulos committed Sep 24, 2024
1 parent 0f7f703 commit d90ed39
Showing 19 changed files with 52 additions and 52 deletions.
2 changes: 2 additions & 0 deletions pyproject.toml
@@ -211,13 +211,15 @@ extend-select = [
     "I",   # isort
     "ISC", # flake8-implicit-str-concat
     "PGH", # pygrep-hooks
+    "PYI", # flake8-pyi
     "RSE", # flake8-raise
     "RUF",
     "TCH", # flake8-type-checking
     "TRY", # tryceratops
     "UP",  # pyupgrade
 ]
 ignore = [
+    "PYI013",
     "RUF005",
     "TRY003",
     # https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules
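
With "PYI" added to extend-select (and "PYI013" ignored), the flake8-pyi family runs as part of the normal lint pass; a typical invocation would be something like the following, where the paths are assumptions rather than part of this commit:

    ruff check src/ tests/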
2 changes: 1 addition & 1 deletion src/zarr/abc/store.py
@@ -36,7 +36,7 @@ class Store(ABC):
     _mode: AccessMode
     _is_open: bool
 
-    def __init__(self, mode: AccessModeLiteral = "r", *args: Any, **kwargs: Any):
+    def __init__(self, mode: AccessModeLiteral = "r", *args: Any, **kwargs: Any) -> None:
         self._is_open = False
         self._mode = AccessMode.from_literal(mode)
6 changes: 2 additions & 4 deletions src/zarr/codecs/transpose.py
@@ -96,16 +96,14 @@ async def _decode_single(
         chunk_spec: ArraySpec,
     ) -> NDBuffer:
         inverse_order = np.argsort(self.order)
-        chunk_array = chunk_array.transpose(inverse_order)
-        return chunk_array
+        return chunk_array.transpose(inverse_order)
 
     async def _encode_single(
         self,
         chunk_array: NDBuffer,
         _chunk_spec: ArraySpec,
     ) -> NDBuffer | None:
-        chunk_array = chunk_array.transpose(self.order)
-        return chunk_array
+        return chunk_array.transpose(self.order)
 
     def compute_encoded_size(self, input_byte_length: int, _chunk_spec: ArraySpec) -> int:
         return input_byte_length
5 changes: 2 additions & 3 deletions src/zarr/core/array.py
@@ -110,7 +110,7 @@ def __init__(
         metadata: ArrayMetadata,
         store_path: StorePath,
         order: Literal["C", "F"] | None = None,
-    ):
+    ) -> None:
         metadata_parsed = parse_array_metadata(metadata)
         order_parsed = parse_indexing_order(order or config.get("array.order"))
 
@@ -331,8 +331,7 @@ def from_dict(
         data: dict[str, JSON],
     ) -> AsyncArray:
         metadata = parse_array_metadata(data)
-        async_array = cls(metadata=metadata, store_path=store_path)
-        return async_array
+        return cls(metadata=metadata, store_path=store_path)
 
     @classmethod
     async def open(
2 changes: 1 addition & 1 deletion src/zarr/core/attributes.py
@@ -13,7 +13,7 @@
 
 
 class Attributes(MutableMapping[str, JSON]):
-    def __init__(self, obj: Array | Group):
+    def __init__(self, obj: Array | Group) -> None:
         # key=".zattrs", read_only=False, cache=True, synchronizer=None
         self._obj = obj
4 changes: 2 additions & 2 deletions src/zarr/core/buffer/core.py
@@ -136,7 +136,7 @@ class Buffer(ABC):
     array-like object that must be 1-dim, contiguous, and byte dtype.
     """
 
-    def __init__(self, array_like: ArrayLike):
+    def __init__(self, array_like: ArrayLike) -> None:
         if array_like.ndim != 1:
             raise ValueError("array_like: only 1-dim allowed")
         if array_like.dtype != np.dtype("b"):
@@ -313,7 +313,7 @@ class NDBuffer:
     ndarray-like object that is convertible to a regular Numpy array.
     """
 
-    def __init__(self, array: NDArrayLike):
+    def __init__(self, array: NDArrayLike) -> None:
         # assert array.ndim > 0
         assert array.dtype != object
         self._data = array
4 changes: 2 additions & 2 deletions src/zarr/core/buffer/cpu.py
@@ -45,7 +45,7 @@ class Buffer(core.Buffer):
     array-like object that must be 1-dim, contiguous, and byte dtype.
     """
 
-    def __init__(self, array_like: ArrayLike):
+    def __init__(self, array_like: ArrayLike) -> None:
         super().__init__(array_like)
 
     @classmethod
@@ -143,7 +143,7 @@ class NDBuffer(core.NDBuffer):
     ndarray-like object that is convertible to a regular Numpy array.
     """
 
-    def __init__(self, array: NDArrayLike):
+    def __init__(self, array: NDArrayLike) -> None:
         super().__init__(array)
 
     @classmethod
4 changes: 2 additions & 2 deletions src/zarr/core/buffer/gpu.py
@@ -48,7 +48,7 @@ class Buffer(core.Buffer):
     array-like object that must be 1-dim, contiguous, and byte dtype.
     """
 
-    def __init__(self, array_like: ArrayLike):
+    def __init__(self, array_like: ArrayLike) -> None:
         if cp is None:
             raise ImportError(
                 "Cannot use zarr.buffer.gpu.Buffer without cupy. Please install cupy."
@@ -137,7 +137,7 @@ class NDBuffer(core.NDBuffer):
     ndarray-like object that is convertible to a regular Numpy array.
    """
 
-    def __init__(self, array: NDArrayLike):
+    def __init__(self, array: NDArrayLike) -> None:
         if cp is None:
             raise ImportError(
                 "Cannot use zarr.buffer.gpu.NDBuffer without cupy. Please install cupy."
12 changes: 6 additions & 6 deletions src/zarr/core/group.py
@@ -54,7 +54,7 @@ def parse_zarr_format(data: Any) -> ZarrFormat:
 def parse_attributes(data: Any) -> dict[str, Any]:
     if data is None:
         return {}
-    elif isinstance(data, dict) and all(isinstance(v, str) for v in data.keys()):
+    elif isinstance(data, dict) and all(isinstance(k, str) for k in data):
         return data
     msg = f"Expected dict with string keys. Got {type(data)} instead."
     raise TypeError(msg)
@@ -104,7 +104,9 @@ def to_buffer_dict(self, prototype: BufferPrototype) -> dict[str, Buffer]:
         ),
     }
 
-    def __init__(self, attributes: dict[str, Any] | None = None, zarr_format: ZarrFormat = 3):
+    def __init__(
+        self, attributes: dict[str, Any] | None = None, zarr_format: ZarrFormat = 3
+    ) -> None:
         attributes_parsed = parse_attributes(attributes)
         zarr_format_parsed = parse_zarr_format(zarr_format)
 
@@ -202,11 +204,10 @@ def from_dict(
         store_path: StorePath,
         data: dict[str, Any],
     ) -> AsyncGroup:
-        group = cls(
+        return cls(
             metadata=GroupMetadata.from_dict(data),
             store_path=store_path,
         )
-        return group
 
     async def getitem(
         self,
@@ -888,8 +889,7 @@ def members(self, max_depth: int | None = 0) -> tuple[tuple[str, Array | Group],
         """
         _members = self._sync_iter(self._async_group.members(max_depth=max_depth))
 
-        result = tuple((kv[0], _parse_async_node(kv[1])) for kv in _members)
-        return result
+        return tuple((kv[0], _parse_async_node(kv[1])) for kv in _members)
 
     def __contains__(self, member: str) -> bool:
         return self._sync(self._async_group.contains(member))
24 changes: 14 additions & 10 deletions src/zarr/core/indexing.py
@@ -54,7 +54,7 @@ class ArrayIndexError(IndexError):
 class BoundsCheckError(IndexError):
     _msg = ""
 
-    def __init__(self, dim_len: int):
+    def __init__(self, dim_len: int) -> None:
         self._msg = f"index out of bounds for dimension with length {dim_len}"
 
 
@@ -255,7 +255,7 @@ class IntDimIndexer:
     dim_chunk_len: int
     nitems: int = 1
 
-    def __init__(self, dim_sel: int, dim_len: int, dim_chunk_len: int):
+    def __init__(self, dim_sel: int, dim_len: int, dim_chunk_len: int) -> None:
         object.__setattr__(self, "dim_sel", normalize_integer_selection(dim_sel, dim_len))
         object.__setattr__(self, "dim_len", dim_len)
         object.__setattr__(self, "dim_chunk_len", dim_chunk_len)
@@ -279,7 +279,7 @@ class SliceDimIndexer:
     stop: int
     step: int
 
-    def __init__(self, dim_sel: slice, dim_len: int, dim_chunk_len: int):
+    def __init__(self, dim_sel: slice, dim_len: int, dim_chunk_len: int) -> None:
         # normalize
         start, stop, step = dim_sel.indices(dim_len)
         if step < 1:
@@ -453,7 +453,7 @@ def __init__(
         selection: BasicSelection,
         shape: ChunkCoords,
         chunk_grid: ChunkGrid,
-    ):
+    ) -> None:
         chunk_shape = get_chunk_shape(chunk_grid)
         # handle ellipsis
         selection_normalized = replace_ellipsis(selection, shape)
@@ -509,7 +509,7 @@ class BoolArrayDimIndexer:
     nitems: int
     dim_chunk_ixs: npt.NDArray[np.intp]
 
-    def __init__(self, dim_sel: npt.NDArray[np.bool_], dim_len: int, dim_chunk_len: int):
+    def __init__(self, dim_sel: npt.NDArray[np.bool_], dim_len: int, dim_chunk_len: int) -> None:
         # check number of dimensions
         if not is_bool_array(dim_sel, 1):
             raise IndexError("Boolean arrays in an orthogonal selection must be 1-dimensional only")
@@ -626,7 +626,7 @@ def __init__(
         wraparound: bool = True,
         boundscheck: bool = True,
         order: Order = Order.UNKNOWN,
-    ):
+    ) -> None:
         # ensure 1d array
         dim_sel = np.asanyarray(dim_sel)
         if not is_integer_array(dim_sel, 1):
@@ -766,7 +766,7 @@ class OrthogonalIndexer(Indexer):
     is_advanced: bool
     drop_axes: tuple[int, ...]
 
-    def __init__(self, selection: Selection, shape: ChunkCoords, chunk_grid: ChunkGrid):
+    def __init__(self, selection: Selection, shape: ChunkCoords, chunk_grid: ChunkGrid) -> None:
         chunk_shape = get_chunk_shape(chunk_grid)
 
         # handle ellipsis
@@ -880,7 +880,9 @@ class BlockIndexer(Indexer):
     shape: ChunkCoords
     drop_axes: ChunkCoords
 
-    def __init__(self, selection: BasicSelection, shape: ChunkCoords, chunk_grid: ChunkGrid):
+    def __init__(
+        self, selection: BasicSelection, shape: ChunkCoords, chunk_grid: ChunkGrid
+    ) -> None:
         chunk_shape = get_chunk_shape(chunk_grid)
 
         # handle ellipsis
@@ -1005,7 +1007,9 @@ class CoordinateIndexer(Indexer):
     chunk_shape: ChunkCoords
     drop_axes: ChunkCoords
 
-    def __init__(self, selection: CoordinateSelection, shape: ChunkCoords, chunk_grid: ChunkGrid):
+    def __init__(
+        self, selection: CoordinateSelection, shape: ChunkCoords, chunk_grid: ChunkGrid
+    ) -> None:
         chunk_shape = get_chunk_shape(chunk_grid)
 
         cdata_shape: ChunkCoords
@@ -1122,7 +1126,7 @@ def __iter__(self) -> Iterator[ChunkProjection]:
 
 @dataclass(frozen=True)
 class MaskIndexer(CoordinateIndexer):
-    def __init__(self, selection: MaskSelection, shape: ChunkCoords, chunk_grid: ChunkGrid):
+    def __init__(self, selection: MaskSelection, shape: ChunkCoords, chunk_grid: ChunkGrid) -> None:
         # some initial normalization
         selection_normalized = cast(tuple[MaskSelection], ensure_tuple(selection))
         selection_normalized = cast(tuple[MaskSelection], replace_lists(selection_normalized))
8 changes: 4 additions & 4 deletions src/zarr/registry.py
@@ -107,7 +107,7 @@ def fully_qualified_name(cls: type) -> str:
 
 
 def register_codec(key: str, codec_cls: type[Codec]) -> None:
-    if key not in __codec_registries.keys():
+    if key not in __codec_registries:
         __codec_registries[key] = Registry()
     __codec_registries[key].register(codec_cls)
 
@@ -158,7 +158,7 @@ def get_pipeline_class(reload_config: bool = False) -> type[CodecPipeline]:
     if pipeline_class:
         return pipeline_class
     raise BadConfigError(
-        f"Pipeline class '{path}' not found in registered pipelines: {list(__pipeline_registry.keys())}."
+        f"Pipeline class '{path}' not found in registered pipelines: {list(__pipeline_registry)}."
     )
 
 
@@ -172,7 +172,7 @@ def get_buffer_class(reload_config: bool = False) -> type[Buffer]:
     if buffer_class:
         return buffer_class
     raise BadConfigError(
-        f"Buffer class '{path}' not found in registered buffers: {list(__buffer_registry.keys())}."
+        f"Buffer class '{path}' not found in registered buffers: {list(__buffer_registry)}."
     )
 
 
@@ -185,7 +185,7 @@ def get_ndbuffer_class(reload_config: bool = False) -> type[NDBuffer]:
     if ndbuffer_class:
         return ndbuffer_class
     raise BadConfigError(
-        f"NDBuffer class '{path}' not found in registered buffers: {list(__ndbuffer_registry.keys())}."
+        f"NDBuffer class '{path}' not found in registered buffers: {list(__ndbuffer_registry)}."
     )
8 changes: 3 additions & 5 deletions src/zarr/store/common.py
@@ -23,15 +23,14 @@ def _dereference_path(root: str, path: str) -> str:
     assert isinstance(path, str)
     root = root.rstrip("/")
     path = f"{root}/{path}" if root else path
-    path = path.rstrip("/")
-    return path
+    return path.rstrip("/")
 
 
 class StorePath:
     store: Store
     path: str
 
-    def __init__(self, store: Store, path: str | None = None):
+    def __init__(self, store: Store, path: str | None = None) -> None:
         self.store = store
         self.path = path or ""
 
@@ -265,8 +264,7 @@ async def contains_array(store_path: StorePath, zarr_format: ZarrFormat) -> bool
     except (ValueError, KeyError):
         return False
     elif zarr_format == 2:
-        result = await (store_path / ZARRAY_JSON).exists()
-        return result
+        return await (store_path / ZARRAY_JSON).exists()
     msg = f"Invalid zarr_format provided. Got {zarr_format}, expected 2 or 3"
     raise ValueError(msg)
2 changes: 1 addition & 1 deletion src/zarr/store/local.py
@@ -79,7 +79,7 @@ class LocalStore(Store):
 
     root: Path
 
-    def __init__(self, root: Path | str, *, mode: AccessModeLiteral = "r"):
+    def __init__(self, root: Path | str, *, mode: AccessModeLiteral = "r") -> None:
         super().__init__(mode=mode)
         if isinstance(root, str):
             root = Path(root)
9 changes: 4 additions & 5 deletions src/zarr/store/memory.py
@@ -30,7 +30,7 @@ def __init__(
         store_dict: MutableMapping[str, Buffer] | None = None,
         *,
         mode: AccessModeLiteral = "r",
-    ):
+    ) -> None:
         super().__init__(mode=mode)
         if store_dict is None:
             store_dict = {}
@@ -80,8 +80,7 @@ async def get_partial_values(
         async def _get(key: str, byte_range: tuple[int, int | None]) -> Buffer | None:
             return await self.get(key, prototype=prototype, byte_range=byte_range)
 
-        vals = await concurrent_map(key_ranges, _get, limit=None)
-        return vals
+        return await concurrent_map(key_ranges, _get, limit=None)
 
     async def exists(self, key: str) -> bool:
         return key in self._store_dict
@@ -137,7 +136,7 @@ async def list_dir(self, prefix: str) -> AsyncGenerator[str, None]:
             prefix = prefix[:-1]
 
         if prefix == "":
-            keys_unique = set(k.split("/")[0] for k in self._store_dict.keys())
+            keys_unique = set(k.split("/")[0] for k in self._store_dict)
         else:
             # Our dictionary doesn't contain directory markers, but we want to include
             # a pseudo directory when there's a nested item and we're listing an
@@ -166,7 +165,7 @@ def __init__(
         store_dict: MutableMapping[str, Buffer] | None = None,
         *,
         mode: AccessModeLiteral = "r",
-    ):
+    ) -> None:
         super().__init__(mode=mode)
         if store_dict:
             self._store_dict = {k: gpu.Buffer.from_buffer(store_dict[k]) for k in iter(store_dict)}
3 changes: 2 additions & 1 deletion src/zarr/store/remote.py
@@ -39,7 +39,7 @@ def __init__(
         mode: AccessModeLiteral = "r",
         path: str = "/",
         allowed_exceptions: tuple[type[Exception], ...] = ALLOWED_EXCEPTIONS,
-    ):
+    ) -> None:
         """
         Parameters
         ----------
@@ -49,6 +49,7 @@ def __init__(
             keys, rather than some other IO failure
         storage_options: passed on to fsspec to make the filesystem instance. If url is a UPath,
             this must not be used.
+        """
         super().__init__(mode=mode)
         self.fs = fs
3 changes: 1 addition & 2 deletions src/zarr/testing/strategies.py
@@ -171,5 +171,4 @@ def key_ranges(keys: SearchStrategy = node_names) -> SearchStrategy[list]:
         st.none() | st.integers(min_value=0), st.none() | st.integers(min_value=0)
     )
     key_tuple = st.tuples(keys, byte_ranges)
-    key_range_st = st.lists(key_tuple, min_size=1, max_size=10)
-    return key_range_st
+    return st.lists(key_tuple, min_size=1, max_size=10)