Skip to content

Commit 3143e97

Browse files
Apply ruff/flake8-type-checking rule TC006
TC006 Add quotes to type expression in `typing.cast()`
1 parent d5b59c5 commit 3143e97

17 files changed

+63
-62
lines changed

src/zarr/api/asynchronous.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -329,7 +329,7 @@ async def open(
329329
try:
330330
metadata_dict = await get_array_metadata(store_path, zarr_format=zarr_format)
331331
# TODO: remove this cast when we fix typing for array metadata dicts
332-
_metadata_dict = cast(ArrayMetadataDict, metadata_dict)
332+
_metadata_dict = cast("ArrayMetadataDict", metadata_dict)
333333
# for v2, the above would already have raised an exception if not an array
334334
zarr_format = _metadata_dict["zarr_format"]
335335
is_v3_array = zarr_format == 3 and _metadata_dict.get("node_type") == "array"

src/zarr/codecs/crc32c_.py

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -40,7 +40,9 @@ async def _decode_single(
4040
inner_bytes = data[:-4]
4141

4242
# Need to do a manual cast until https://github.com/numpy/numpy/issues/26783 is resolved
43-
computed_checksum = np.uint32(crc32c(cast(typing_extensions.Buffer, inner_bytes))).tobytes()
43+
computed_checksum = np.uint32(
44+
crc32c(cast("typing_extensions.Buffer", inner_bytes))
45+
).tobytes()
4446
stored_checksum = bytes(crc32_bytes)
4547
if computed_checksum != stored_checksum:
4648
raise ValueError(
@@ -55,7 +57,7 @@ async def _encode_single(
5557
) -> Buffer | None:
5658
data = chunk_bytes.as_numpy_array()
5759
# Calculate the checksum and "cast" it to a numpy array
58-
checksum = np.array([crc32c(cast(typing_extensions.Buffer, data))], dtype=np.uint32)
60+
checksum = np.array([crc32c(cast("typing_extensions.Buffer", data))], dtype=np.uint32)
5961
# Append the checksum (as bytes) to the data
6062
return chunk_spec.prototype.buffer.from_array_like(np.append(data, checksum.view("B")))
6163

src/zarr/codecs/sharding.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -115,7 +115,7 @@ class _ShardIndex(NamedTuple):
115115
def chunks_per_shard(self) -> ChunkCoords:
116116
result = tuple(self.offsets_and_lengths.shape[0:-1])
117117
# The cast is required until https://github.com/numpy/numpy/pull/27211 is merged
118-
return cast(ChunkCoords, result)
118+
return cast("ChunkCoords", result)
119119

120120
def _localize_chunk(self, chunk_coords: ChunkCoords) -> ChunkCoords:
121121
return tuple(

src/zarr/codecs/transpose.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,7 @@ def parse_transpose_order(data: JSON | Iterable[int]) -> tuple[int, ...]:
2323
raise TypeError(f"Expected an iterable. Got {data} instead.")
2424
if not all(isinstance(a, int) for a in data):
2525
raise TypeError(f"Expected an iterable of integers. Got {data} instead.")
26-
return tuple(cast(Iterable[int], data))
26+
return tuple(cast("Iterable[int]", data))
2727

2828

2929
@dataclass(frozen=True)

src/zarr/core/array.py

Lines changed: 12 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -903,7 +903,7 @@ async def open(
903903
store_path = await make_store_path(store)
904904
metadata_dict = await get_array_metadata(store_path, zarr_format=zarr_format)
905905
# TODO: remove this cast when we have better type hints
906-
_metadata_dict = cast(ArrayV3MetadataDict, metadata_dict)
906+
_metadata_dict = cast("ArrayV3MetadataDict", metadata_dict)
907907
return cls(store_path=store_path, metadata=_metadata_dict)
908908

909909
@property
@@ -1399,7 +1399,7 @@ async def _set_selection(
13991399
if isinstance(array_like, np._typing._SupportsArrayFunc):
14001400
# TODO: need to handle array types that don't support __array_function__
14011401
# like PyTorch and JAX
1402-
array_like_ = cast(np._typing._SupportsArrayFunc, array_like)
1402+
array_like_ = cast("np._typing._SupportsArrayFunc", array_like)
14031403
value = np.asanyarray(value, dtype=self.metadata.dtype, like=array_like_)
14041404
else:
14051405
if not hasattr(value, "shape"):
@@ -1413,7 +1413,7 @@ async def _set_selection(
14131413
value = value.astype(dtype=self.metadata.dtype, order="A")
14141414
else:
14151415
value = np.array(value, dtype=self.metadata.dtype, order="A")
1416-
value = cast(NDArrayLike, value)
1416+
value = cast("NDArrayLike", value)
14171417
# We accept any ndarray like object from the user and convert it
14181418
# to a NDBuffer (or subclass). From this point onwards, we only pass
14191419
# Buffer and NDBuffer between components.
@@ -2436,11 +2436,11 @@ def __getitem__(self, selection: Selection) -> NDArrayLikeOrScalar:
24362436
"""
24372437
fields, pure_selection = pop_fields(selection)
24382438
if is_pure_fancy_indexing(pure_selection, self.ndim):
2439-
return self.vindex[cast(CoordinateSelection | MaskSelection, selection)]
2439+
return self.vindex[cast("CoordinateSelection | MaskSelection", selection)]
24402440
elif is_pure_orthogonal_indexing(pure_selection, self.ndim):
24412441
return self.get_orthogonal_selection(pure_selection, fields=fields)
24422442
else:
2443-
return self.get_basic_selection(cast(BasicSelection, pure_selection), fields=fields)
2443+
return self.get_basic_selection(cast("BasicSelection", pure_selection), fields=fields)
24442444

24452445
def __setitem__(self, selection: Selection, value: npt.ArrayLike) -> None:
24462446
"""Modify data for an item or region of the array.
@@ -2535,11 +2535,11 @@ def __setitem__(self, selection: Selection, value: npt.ArrayLike) -> None:
25352535
"""
25362536
fields, pure_selection = pop_fields(selection)
25372537
if is_pure_fancy_indexing(pure_selection, self.ndim):
2538-
self.vindex[cast(CoordinateSelection | MaskSelection, selection)] = value
2538+
self.vindex[cast("CoordinateSelection | MaskSelection", selection)] = value
25392539
elif is_pure_orthogonal_indexing(pure_selection, self.ndim):
25402540
self.set_orthogonal_selection(pure_selection, value, fields=fields)
25412541
else:
2542-
self.set_basic_selection(cast(BasicSelection, pure_selection), value, fields=fields)
2542+
self.set_basic_selection(cast("BasicSelection", pure_selection), value, fields=fields)
25432543

25442544
@_deprecate_positional_args
25452545
def get_basic_selection(
@@ -3657,7 +3657,7 @@ def update_attributes(self, new_attributes: dict[str, JSON]) -> Array:
36573657
# TODO: remove this cast when type inference improves
36583658
new_array = sync(self._async_array.update_attributes(new_attributes))
36593659
# TODO: remove this cast when type inference improves
3660-
_new_array = cast(AsyncArray[ArrayV2Metadata] | AsyncArray[ArrayV3Metadata], new_array)
3660+
_new_array = cast("AsyncArray[ArrayV2Metadata] | AsyncArray[ArrayV3Metadata]", new_array)
36613661
return type(self)(_new_array)
36623662

36633663
def __repr__(self) -> str:
@@ -4252,7 +4252,7 @@ async def init_array(
42524252
serializer=serializer,
42534253
dtype=dtype_parsed,
42544254
)
4255-
sub_codecs = cast(tuple[Codec, ...], (*array_array, array_bytes, *bytes_bytes))
4255+
sub_codecs = cast("tuple[Codec, ...]", (*array_array, array_bytes, *bytes_bytes))
42564256
codecs_out: tuple[Codec, ...]
42574257
if shard_shape_parsed is not None:
42584258
index_location = None
@@ -4523,7 +4523,7 @@ def _parse_keep_array_attr(
45234523
compressors = "auto"
45244524
if serializer == "keep":
45254525
if zarr_format == 3 and data.metadata.zarr_format == 3:
4526-
serializer = cast(SerializerLike, data.serializer)
4526+
serializer = cast("SerializerLike", data.serializer)
45274527
else:
45284528
serializer = "auto"
45294529
if fill_value is None:
@@ -4701,7 +4701,7 @@ def _parse_chunk_encoding_v3(
47014701
if isinstance(filters, dict | Codec):
47024702
maybe_array_array = (filters,)
47034703
else:
4704-
maybe_array_array = cast(Iterable[Codec | dict[str, JSON]], filters)
4704+
maybe_array_array = cast("Iterable[Codec | dict[str, JSON]]", filters)
47054705
out_array_array = tuple(_parse_array_array_codec(c) for c in maybe_array_array)
47064706

47074707
if serializer == "auto":
@@ -4718,7 +4718,7 @@ def _parse_chunk_encoding_v3(
47184718
if isinstance(compressors, dict | Codec):
47194719
maybe_bytes_bytes = (compressors,)
47204720
else:
4721-
maybe_bytes_bytes = cast(Iterable[Codec | dict[str, JSON]], compressors)
4721+
maybe_bytes_bytes = cast("Iterable[Codec | dict[str, JSON]]", compressors)
47224722

47234723
out_bytes_bytes = tuple(_parse_bytes_bytes_codec(c) for c in maybe_bytes_bytes)
47244724

src/zarr/core/array_spec.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -64,7 +64,7 @@ def from_dict(cls, data: ArrayConfigParams) -> Self:
6464
"""
6565
kwargs_out: ArrayConfigParams = {}
6666
for f in fields(ArrayConfig):
67-
field_name = cast(Literal["order", "write_empty_chunks"], f.name)
67+
field_name = cast("Literal['order', 'write_empty_chunks']", f.name)
6868
if field_name not in data:
6969
kwargs_out[field_name] = zarr_config.get(f"array.{field_name}")
7070
else:

src/zarr/core/buffer/core.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -159,7 +159,7 @@ def create_zero_length(cls) -> Self:
159159
if cls is Buffer:
160160
raise NotImplementedError("Cannot call abstract method on the abstract class 'Buffer'")
161161
return cls(
162-
cast(ArrayLike, None)
162+
cast("ArrayLike", None)
163163
) # This line will never be reached, but it satisfies the type checker
164164

165165
@classmethod
@@ -207,7 +207,7 @@ def from_buffer(cls, buffer: Buffer) -> Self:
207207
if cls is Buffer:
208208
raise NotImplementedError("Cannot call abstract method on the abstract class 'Buffer'")
209209
return cls(
210-
cast(ArrayLike, None)
210+
cast("ArrayLike", None)
211211
) # This line will never be reached, but it satisfies the type checker
212212

213213
@classmethod
@@ -227,7 +227,7 @@ def from_bytes(cls, bytes_like: BytesLike) -> Self:
227227
if cls is Buffer:
228228
raise NotImplementedError("Cannot call abstract method on the abstract class 'Buffer'")
229229
return cls(
230-
cast(ArrayLike, None)
230+
cast("ArrayLike", None)
231231
) # This line will never be reached, but it satisfies the type checker
232232

233233
def as_array_like(self) -> ArrayLike:
@@ -371,7 +371,7 @@ def create(
371371
"Cannot call abstract method on the abstract class 'NDBuffer'"
372372
)
373373
return cls(
374-
cast(NDArrayLike, None)
374+
cast("NDArrayLike", None)
375375
) # This line will never be reached, but it satisfies the type checker
376376

377377
@classmethod
@@ -408,7 +408,7 @@ def from_numpy_array(cls, array_like: npt.ArrayLike) -> Self:
408408
"Cannot call abstract method on the abstract class 'NDBuffer'"
409409
)
410410
return cls(
411-
cast(NDArrayLike, None)
411+
cast("NDArrayLike", None)
412412
) # This line will never be reached, but it satisfies the type checker
413413

414414
def as_ndarray_like(self) -> NDArrayLike:
@@ -440,7 +440,7 @@ def as_scalar(self) -> ScalarType:
440440
"""Returns the buffer as a scalar value"""
441441
if self._data.size != 1:
442442
raise ValueError("Buffer does not contain a single scalar value")
443-
return cast(ScalarType, self.as_numpy_array()[()])
443+
return cast("ScalarType", self.as_numpy_array()[()])
444444

445445
@property
446446
def dtype(self) -> np.dtype[Any]:

src/zarr/core/buffer/gpu.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -103,7 +103,7 @@ def from_bytes(cls, bytes_like: BytesLike) -> Self:
103103
return cls.from_array_like(cp.frombuffer(bytes_like, dtype="B"))
104104

105105
def as_numpy_array(self) -> npt.NDArray[Any]:
106-
return cast(npt.NDArray[Any], cp.asnumpy(self._data))
106+
return cast("npt.NDArray[Any]", cp.asnumpy(self._data))
107107

108108
def __add__(self, other: core.Buffer) -> Self:
109109
other_array = other.as_array_like()
@@ -204,7 +204,7 @@ def as_numpy_array(self) -> npt.NDArray[Any]:
204204
-------
205205
NumPy array of this buffer (might be a data copy)
206206
"""
207-
return cast(npt.NDArray[Any], cp.asnumpy(self._data))
207+
return cast("npt.NDArray[Any]", cp.asnumpy(self._data))
208208

209209
def __getitem__(self, key: Any) -> Self:
210210
return self.__class__(self._data.__getitem__(key))

src/zarr/core/chunk_key_encodings.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@
2020
def parse_separator(data: JSON) -> SeparatorLiteral:
2121
if data not in (".", "/"):
2222
raise ValueError(f"Expected an '.' or '/' separator. Got {data} instead.")
23-
return cast(SeparatorLiteral, data)
23+
return cast("SeparatorLiteral", data)
2424

2525

2626
class ChunkKeyEncodingParams(TypedDict):
@@ -48,7 +48,7 @@ def from_dict(cls, data: dict[str, JSON] | ChunkKeyEncodingLike) -> ChunkKeyEnco
4848
data = {"name": data["name"], "configuration": {"separator": data["separator"]}}
4949

5050
# TODO: remove this cast when we are statically typing the JSON metadata completely.
51-
data = cast(dict[str, JSON], data)
51+
data = cast("dict[str, JSON]", data)
5252

5353
# configuration is optional for chunk key encodings
5454
name_parsed, config_parsed = parse_named_configuration(data, require_configuration=False)

src/zarr/core/common.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -158,7 +158,7 @@ def parse_fill_value(data: Any) -> Any:
158158

159159
def parse_order(data: Any) -> Literal["C", "F"]:
160160
if data in ("C", "F"):
161-
return cast(Literal["C", "F"], data)
161+
return cast("Literal['C', 'F']", data)
162162
raise ValueError(f"Expected one of ('C', 'F'), got {data} instead.")
163163

164164

@@ -202,4 +202,4 @@ def _warn_order_kwarg() -> None:
202202

203203
def _default_zarr_format() -> ZarrFormat:
204204
"""Return the default zarr_version"""
205-
return cast(ZarrFormat, int(zarr_config.get("default_zarr_format", 3)))
205+
return cast("ZarrFormat", int(zarr_config.get("default_zarr_format", 3)))

src/zarr/core/config.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -134,6 +134,6 @@ def enable_gpu(self) -> ConfigSet:
134134

135135
def parse_indexing_order(data: Any) -> Literal["C", "F"]:
136136
if data in ("C", "F"):
137-
return cast(Literal["C", "F"], data)
137+
return cast("Literal['C', 'F']", data)
138138
msg = f"Expected one of ('C', 'F'), got {data} instead."
139139
raise ValueError(msg)

src/zarr/core/group.py

Lines changed: 4 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -81,15 +81,15 @@
8181
def parse_zarr_format(data: Any) -> ZarrFormat:
8282
"""Parse the zarr_format field from metadata."""
8383
if data in (2, 3):
84-
return cast(ZarrFormat, data)
84+
return cast("ZarrFormat", data)
8585
msg = f"Invalid zarr_format. Expected one of 2 or 3. Got {data}."
8686
raise ValueError(msg)
8787

8888

8989
def parse_node_type(data: Any) -> NodeType:
9090
"""Parse the node_type field from metadata."""
9191
if data in ("array", "group"):
92-
return cast(Literal["array", "group"], data)
92+
return cast("Literal['array', 'group']", data)
9393
raise MetadataValidationError("node_type", "array or group", data)
9494

9595

@@ -362,7 +362,7 @@ def to_buffer_dict(self, prototype: BufferPrototype) -> dict[str, Buffer]:
362362
# it's an array
363363
if isinstance(v.get("fill_value", None), np.void):
364364
v["fill_value"] = base64.standard_b64encode(
365-
cast(bytes, v["fill_value"])
365+
cast("bytes", v["fill_value"])
366366
).decode("ascii")
367367
else:
368368
v = _replace_special_floats(v)
@@ -3246,8 +3246,7 @@ def _ensure_consistent_zarr_format(
32463246
raise ValueError(msg)
32473247

32483248
return cast(
3249-
Mapping[str, GroupMetadata | ArrayV2Metadata]
3250-
| Mapping[str, GroupMetadata | ArrayV3Metadata],
3249+
"Mapping[str, GroupMetadata | ArrayV2Metadata] | Mapping[str, GroupMetadata | ArrayV3Metadata]",
32513250
data,
32523251
)
32533252

0 commit comments

Comments (0)