diff --git a/bench/compress_normal.py b/bench/compress_normal.py
index 608cfe8dc..179520a0e 100644
--- a/bench/compress_normal.py
+++ b/bench/compress_normal.py
@@ -16,7 +16,7 @@
         a,
         chunks=1000000,
         compression="blosc",
-        compression_opts=dict(cname="lz4", clevel=5, shuffle=2),
+        compression_opts={"cname": "lz4", "clevel": 5, "shuffle": 2},
     )
     print(z)
diff --git a/pyproject.toml b/pyproject.toml
index a10c22d08..33aa53814 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -208,6 +208,7 @@ extend-exclude = [
 [tool.ruff.lint]
 extend-select = [
     "B", # flake8-bugbear
+    "C4", # flake8-comprehensions
     "FLY", # flynt
     "I", # isort
     "ISC", # flake8-implicit-str-concat
diff --git a/src/zarr/codecs/sharding.py b/src/zarr/codecs/sharding.py
index 3ae51ce54..6282750f2 100644
--- a/src/zarr/codecs/sharding.py
+++ b/src/zarr/codecs/sharding.py
@@ -151,7 +151,7 @@ def is_dense(self, chunk_byte_length: int) -> bool:
 
         # Are all non-empty offsets unique?
         if len(
-            set(offset for offset, _ in sorted_offsets_and_lengths if offset != MAX_UINT_64)
+            {offset for offset, _ in sorted_offsets_and_lengths if offset != MAX_UINT_64}
         ) != len(sorted_offsets_and_lengths):
             return False
@@ -380,8 +380,8 @@ def to_dict(self) -> dict[str, JSON]:
             "name": "sharding_indexed",
             "configuration": {
                 "chunk_shape": self.chunk_shape,
-                "codecs": tuple([s.to_dict() for s in self.codecs]),
-                "index_codecs": tuple([s.to_dict() for s in self.index_codecs]),
+                "codecs": tuple(s.to_dict() for s in self.codecs),
+                "index_codecs": tuple(s.to_dict() for s in self.index_codecs),
                 "index_location": self.index_location.value,
             },
         }
@@ -477,7 +477,7 @@ async def _decode_partial_single(
         )
 
         indexed_chunks = list(indexer)
-        all_chunk_coords = set(chunk_coords for chunk_coords, _, _ in indexed_chunks)
+        all_chunk_coords = {chunk_coords for chunk_coords, _, _ in indexed_chunks}
 
         # reading bytes of all requested chunks
         shard_dict: ShardMapping = {}
diff --git a/src/zarr/store/memory.py b/src/zarr/store/memory.py
index eb1ba9212..49e6cdf01 100644
--- a/src/zarr/store/memory.py
+++ b/src/zarr/store/memory.py
@@ -146,7 +146,7 @@ async def list_dir(self, prefix: str) -> AsyncGenerator[str, None]:
             prefix = prefix[:-1]
 
         if prefix == "":
-            keys_unique = set(k.split("/")[0] for k in self._store_dict)
+            keys_unique = {k.split("/")[0] for k in self._store_dict}
         else:
             # Our dictionary doesn't contain directory markers, but we want to include
             # a pseudo directory when there's a nested item and we're listing an
diff --git a/src/zarr/store/zip.py b/src/zarr/store/zip.py
index 456657ccc..c2bfb09a3 100644
--- a/src/zarr/store/zip.py
+++ b/src/zarr/store/zip.py
@@ -237,7 +237,7 @@ async def list_dir(self, prefix: str) -> AsyncGenerator[str, None]:
         keys = self._zf.namelist()
         seen = set()
         if prefix == "":
-            keys_unique = set(k.split("/")[0] for k in keys)
+            keys_unique = {k.split("/")[0] for k in keys}
             for key in keys_unique:
                 if key not in seen:
                     seen.add(key)
diff --git a/tests/v3/test_group.py b/tests/v3/test_group.py
index 22499dfb4..8c6464d3b 100644
--- a/tests/v3/test_group.py
+++ b/tests/v3/test_group.py
@@ -271,7 +271,7 @@ def test_group_iter(store: Store, zarr_format: ZarrFormat) -> None:
     group = Group.from_store(store, zarr_format=zarr_format)
     with pytest.raises(NotImplementedError):
-        [x for x in group]
+        list(group)
 
 
 def test_group_len(store: Store, zarr_format: ZarrFormat) -> None:
diff --git a/tests/v3/test_store/test_remote.py b/tests/v3/test_store/test_remote.py
index 6010f7eca..ca74fc184 100644
--- a/tests/v3/test_store/test_remote.py
+++ b/tests/v3/test_store/test_remote.py
@@ -92,7 +92,7 @@ async def test_basic() -> None:
     store = RemoteStore.from_url(
         f"s3://{test_bucket_name}",
         mode="w",
-        storage_options=dict(endpoint_url=endpoint_url, anon=False),
+        storage_options={"endpoint_url": endpoint_url, "anon": False},
     )
     assert await _collect_aiterator(store.list()) == ()
     assert not await store.exists("foo")
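
For context on the mechanical change above: ruff's `C4` family (flake8-comprehensions) flags builtin calls that wrap a generator, comprehension, or keyword arguments where a literal form is shorter, skips an extra name lookup and intermediate iterable, and reads more directly. A minimal sketch of the before/after pattern, using illustrative data rather than code from the repository:

```python
keys = ["a/x", "a/y", "b/z"]

# Flagged forms: builtins wrapping generators, comprehensions, or kwargs.
top_level = set(k.split("/")[0] for k in keys)  # C401: set() around a generator
opts = dict(cname="lz4", clevel=5, shuffle=2)  # C408: dict() call with keyword arguments
names = tuple([k.upper() for k in keys])  # list built only to be copied into a tuple
as_list = [k for k in keys]  # C416: comprehension that merely re-iterates

# Preferred forms, matching the rewrites in this diff.
top_level = {k.split("/")[0] for k in keys}  # set comprehension
opts = {"cname": "lz4", "clevel": 5, "shuffle": 2}  # dict literal
names = tuple(k.upper() for k in keys)  # feed the generator straight to tuple()
as_list = list(keys)

assert top_level == {"a", "b"}
assert opts["clevel"] == 5
```

Most of the `C4` rules ship with autofixes (some marked unsafe), so once `"C4"` is in `extend-select`, `ruff check --fix` can apply rewrites like these automatically.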