Enforce ruff/flake8-comprehensions rules (C4) (zarr-developers#2239)
* Apply ruff/flake8-comprehensions rule C401

C401 Unnecessary generator (rewrite as a `set` comprehension)

* Apply ruff/flake8-comprehensions rule C408

C408 Unnecessary `dict` call (rewrite as a literal)

* Apply ruff/flake8-comprehensions rule C409

C409 Unnecessary list comprehension passed to `tuple()` (rewrite as a generator)

* Apply ruff/flake8-comprehensions rule C416

C416 Unnecessary `list` comprehension (rewrite using `list()`)

* Enforce ruff/flake8-comprehensions rules (C4)

---------

Co-authored-by: Joe Hamman <joe@earthmover.io>
2 people authored and d-v-b committed Sep 26, 2024
1 parent 748e58b commit 009e888
Showing 7 changed files with 10 additions and 9 deletions.
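For reference, the four rules applied in this commit rewrite the following patterns. These snippets are illustrative sketches with made-up names and values, not excerpts from the diff below:

    # C401: unnecessary generator passed to set() -> use a set comprehension
    names = set(w.lower() for w in ["Ada", "Bob"])   # flagged
    names = {w.lower() for w in ["Ada", "Bob"]}      # fixed

    # C408: unnecessary dict() call -> use a dict literal
    opts = dict(cname="lz4", clevel=5)               # flagged
    opts = {"cname": "lz4", "clevel": 5}             # fixed

    # C409: unnecessary list comprehension passed to tuple() -> pass a generator
    doubled = tuple([n * 2 for n in range(3)])       # flagged
    doubled = tuple(n * 2 for n in range(3))         # fixed

    # C416: unnecessary comprehension that only copies -> call list() directly
    items = [x for x in range(3)]                    # flagged
    items = list(range(3))                           # fixed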
2 changes: 1 addition & 1 deletion bench/compress_normal.py
@@ -16,7 +16,7 @@
a,
chunks=1000000,
compression="blosc",
-    compression_opts=dict(cname="lz4", clevel=5, shuffle=2),
+    compression_opts={"cname": "lz4", "clevel": 5, "shuffle": 2},
)
print(z)

1 change: 1 addition & 0 deletions pyproject.toml
@@ -208,6 +208,7 @@ extend-exclude = [
[tool.ruff.lint]
extend-select = [
"B", # flake8-bugbear
"C4", # flake8-comprehensions
"FLY", # flynt
"I", # isort
"ISC", # flake8-implicit-str-concat
8 changes: 4 additions & 4 deletions src/zarr/codecs/sharding.py
@@ -151,7 +151,7 @@ def is_dense(self, chunk_byte_length: int) -> bool:

# Are all non-empty offsets unique?
if len(
-        set(offset for offset, _ in sorted_offsets_and_lengths if offset != MAX_UINT_64)
+        {offset for offset, _ in sorted_offsets_and_lengths if offset != MAX_UINT_64}
) != len(sorted_offsets_and_lengths):
return False

@@ -380,8 +380,8 @@ def to_dict(self) -> dict[str, JSON]:
"name": "sharding_indexed",
"configuration": {
"chunk_shape": self.chunk_shape,
"codecs": tuple([s.to_dict() for s in self.codecs]),
"index_codecs": tuple([s.to_dict() for s in self.index_codecs]),
"codecs": tuple(s.to_dict() for s in self.codecs),
"index_codecs": tuple(s.to_dict() for s in self.index_codecs),
"index_location": self.index_location.value,
},
}
@@ -477,7 +477,7 @@ async def _decode_partial_single(
)

indexed_chunks = list(indexer)
-        all_chunk_coords = set(chunk_coords for chunk_coords, _, _ in indexed_chunks)
+        all_chunk_coords = {chunk_coords for chunk_coords, _, _ in indexed_chunks}

# reading bytes of all requested chunks
shard_dict: ShardMapping = {}
2 changes: 1 addition & 1 deletion src/zarr/store/memory.py
@@ -146,7 +146,7 @@ async def list_dir(self, prefix: str) -> AsyncGenerator[str, None]:
prefix = prefix[:-1]

if prefix == "":
-            keys_unique = set(k.split("/")[0] for k in self._store_dict)
+            keys_unique = {k.split("/")[0] for k in self._store_dict}
else:
# Our dictionary doesn't contain directory markers, but we want to include
# a pseudo directory when there's a nested item and we're listing an
2 changes: 1 addition & 1 deletion src/zarr/store/zip.py
@@ -237,7 +237,7 @@ async def list_dir(self, prefix: str) -> AsyncGenerator[str, None]:
keys = self._zf.namelist()
seen = set()
if prefix == "":
-            keys_unique = set(k.split("/")[0] for k in keys)
+            keys_unique = {k.split("/")[0] for k in keys}
for key in keys_unique:
if key not in seen:
seen.add(key)
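Both list_dir hunks (memory.py above and zip.py here) build the top-level listing the same way: split each flat key on "/" and keep only the first component, de-duplicated by the set comprehension. A standalone sketch with made-up keys:

    keys = ["zarr.json", "a/zarr.json", "a/0/0", "b/zarr.json"]
    keys_unique = {k.split("/")[0] for k in keys}
    # {"zarr.json", "a", "b"} -- nested keys collapse into their pseudo directory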
2 changes: 1 addition & 1 deletion tests/v3/test_group.py
@@ -271,7 +271,7 @@ def test_group_iter(store: Store, zarr_format: ZarrFormat) -> None:

group = Group.from_store(store, zarr_format=zarr_format)
with pytest.raises(NotImplementedError):
-        [x for x in group]
+        list(group)


def test_group_len(store: Store, zarr_format: ZarrFormat) -> None:
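The C416 rewrite in this test is behavior-preserving: list(group) drives the object's __iter__ exactly as the discarded comprehension did, so the expected NotImplementedError is still raised. A minimal sketch with a dummy class standing in for the group (not zarr's Group):

    import pytest

    class NotIterable:
        def __iter__(self):
            # mirrors an object whose iteration is intentionally unimplemented
            raise NotImplementedError

    with pytest.raises(NotImplementedError):
        list(NotIterable())  # list() calls __iter__, just like [x for x in ...]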
2 changes: 1 addition & 1 deletion tests/v3/test_store/test_remote.py
@@ -92,7 +92,7 @@ async def test_basic() -> None:
store = RemoteStore.from_url(
f"s3://{test_bucket_name}",
mode="w",
-        storage_options=dict(endpoint_url=endpoint_url, anon=False),
+        storage_options={"endpoint_url": endpoint_url, "anon": False},
)
assert await _collect_aiterator(store.list()) == ()
assert not await store.exists("foo")
