diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 75ef0face8..e7d3bdd61c 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -6,7 +6,7 @@ ci:
 default_stages: [pre-commit, pre-push]
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.9.9
+    rev: v0.11.8
     hooks:
       - id: ruff
         args: ["--fix", "--show-fixes"]
@@ -38,7 +38,7 @@ repos:
       # Tests
       - pytest
   - repo: https://github.com/scientific-python/cookie
-    rev: 2025.01.22
+    rev: 2025.05.02
    hooks:
      - id: sp-repo-review
  - repo: https://github.com/pre-commit/pygrep-hooks
diff --git a/src/zarr/api/asynchronous.py b/src/zarr/api/asynchronous.py
index 9b8b43a517..48334d766b 100644
--- a/src/zarr/api/asynchronous.py
+++ b/src/zarr/api/asynchronous.py
@@ -321,7 +321,7 @@ async def open(
     try:
         metadata_dict = await get_array_metadata(store_path, zarr_format=zarr_format)
         # TODO: remove this cast when we fix typing for array metadata dicts
-        _metadata_dict = cast(ArrayMetadataDict, metadata_dict)
+        _metadata_dict = cast("ArrayMetadataDict", metadata_dict)
         # for v2, the above would already have raised an exception if not an array
         zarr_format = _metadata_dict["zarr_format"]
         is_v3_array = zarr_format == 3 and _metadata_dict.get("node_type") == "array"
diff --git a/src/zarr/codecs/crc32c_.py b/src/zarr/codecs/crc32c_.py
index ab8a57eba7..6da673ceac 100644
--- a/src/zarr/codecs/crc32c_.py
+++ b/src/zarr/codecs/crc32c_.py
@@ -40,7 +40,9 @@ async def _decode_single(
         inner_bytes = data[:-4]
 
         # Need to do a manual cast until https://github.com/numpy/numpy/issues/26783 is resolved
-        computed_checksum = np.uint32(crc32c(cast(typing_extensions.Buffer, inner_bytes))).tobytes()
+        computed_checksum = np.uint32(
+            crc32c(cast("typing_extensions.Buffer", inner_bytes))
+        ).tobytes()
         stored_checksum = bytes(crc32_bytes)
         if computed_checksum != stored_checksum:
             raise ValueError(
@@ -55,7 +57,7 @@ async def _encode_single(
     ) -> Buffer | None:
         data = chunk_bytes.as_numpy_array()
         # Calculate the checksum and "cast" it to a numpy array
-        checksum = np.array([crc32c(cast(typing_extensions.Buffer, data))], dtype=np.uint32)
+        checksum = np.array([crc32c(cast("typing_extensions.Buffer", data))], dtype=np.uint32)
         # Append the checksum (as bytes) to the data
         return chunk_spec.prototype.buffer.from_array_like(np.append(data, checksum.view("B")))
 
diff --git a/src/zarr/codecs/sharding.py b/src/zarr/codecs/sharding.py
index 42b1313fac..2ede9b9502 100644
--- a/src/zarr/codecs/sharding.py
+++ b/src/zarr/codecs/sharding.py
@@ -115,7 +115,7 @@ class _ShardIndex(NamedTuple):
     def chunks_per_shard(self) -> ChunkCoords:
         result = tuple(self.offsets_and_lengths.shape[0:-1])
         # The cast is required until https://github.com/numpy/numpy/pull/27211 is merged
-        return cast(ChunkCoords, result)
+        return cast("ChunkCoords", result)
 
     def _localize_chunk(self, chunk_coords: ChunkCoords) -> ChunkCoords:
         return tuple(
diff --git a/src/zarr/codecs/transpose.py b/src/zarr/codecs/transpose.py
index 1aa1eb40e2..85e4526b8b 100644
--- a/src/zarr/codecs/transpose.py
+++ b/src/zarr/codecs/transpose.py
@@ -23,7 +23,7 @@ def parse_transpose_order(data: JSON | Iterable[int]) -> tuple[int, ...]:
         raise TypeError(f"Expected an iterable. Got {data} instead.")
     if not all(isinstance(a, int) for a in data):
         raise TypeError(f"Expected an iterable of integers. Got {data} instead.")
-    return tuple(cast(Iterable[int], data))
+    return tuple(cast("Iterable[int]", data))
 
 
 @dataclass(frozen=True)
diff --git a/src/zarr/core/array.py b/src/zarr/core/array.py
index b0e8b03cd7..35b285fe22 100644
--- a/src/zarr/core/array.py
+++ b/src/zarr/core/array.py
@@ -268,7 +268,7 @@ def __init__(
         if isinstance(metadata, dict):
             zarr_format = metadata["zarr_format"]
             # TODO: remove this when we extensively type the dict representation of metadata
-            _metadata = cast(dict[str, JSON], metadata)
+            _metadata = cast("dict[str, JSON]", metadata)
             if zarr_format == 2:
                 metadata = ArrayV2Metadata.from_dict(_metadata)
             elif zarr_format == 3:
@@ -898,7 +898,7 @@ async def open(
         store_path = await make_store_path(store)
         metadata_dict = await get_array_metadata(store_path, zarr_format=zarr_format)
         # TODO: remove this cast when we have better type hints
-        _metadata_dict = cast(ArrayV3MetadataDict, metadata_dict)
+        _metadata_dict = cast("ArrayV3MetadataDict", metadata_dict)
         return cls(store_path=store_path, metadata=_metadata_dict)
 
     @property
@@ -1394,7 +1394,7 @@ async def _set_selection(
             if isinstance(array_like, np._typing._SupportsArrayFunc):
                 # TODO: need to handle array types that don't support __array_function__
                 # like PyTorch and JAX
-                array_like_ = cast(np._typing._SupportsArrayFunc, array_like)
+                array_like_ = cast("np._typing._SupportsArrayFunc", array_like)
                 value = np.asanyarray(value, dtype=self.metadata.dtype, like=array_like_)
             else:
                 if not hasattr(value, "shape"):
@@ -1408,7 +1408,7 @@ async def _set_selection(
                     value = value.astype(dtype=self.metadata.dtype, order="A")
                 else:
                     value = np.array(value, dtype=self.metadata.dtype, order="A")
-        value = cast(NDArrayLike, value)
+        value = cast("NDArrayLike", value)
         # We accept any ndarray like object from the user and convert it
         # to a NDBuffer (or subclass). From this point onwards, we only pass
         # Buffer and NDBuffer between components.
@@ -2431,11 +2431,11 @@ def __getitem__(self, selection: Selection) -> NDArrayLikeOrScalar:
         """
         fields, pure_selection = pop_fields(selection)
         if is_pure_fancy_indexing(pure_selection, self.ndim):
-            return self.vindex[cast(CoordinateSelection | MaskSelection, selection)]
+            return self.vindex[cast("CoordinateSelection | MaskSelection", selection)]
         elif is_pure_orthogonal_indexing(pure_selection, self.ndim):
             return self.get_orthogonal_selection(pure_selection, fields=fields)
         else:
-            return self.get_basic_selection(cast(BasicSelection, pure_selection), fields=fields)
+            return self.get_basic_selection(cast("BasicSelection", pure_selection), fields=fields)
 
     def __setitem__(self, selection: Selection, value: npt.ArrayLike) -> None:
         """Modify data for an item or region of the array.
@@ -2530,11 +2530,11 @@ def __setitem__(self, selection: Selection, value: npt.ArrayLike) -> None:
         """
         fields, pure_selection = pop_fields(selection)
         if is_pure_fancy_indexing(pure_selection, self.ndim):
-            self.vindex[cast(CoordinateSelection | MaskSelection, selection)] = value
+            self.vindex[cast("CoordinateSelection | MaskSelection", selection)] = value
         elif is_pure_orthogonal_indexing(pure_selection, self.ndim):
             self.set_orthogonal_selection(pure_selection, value, fields=fields)
         else:
-            self.set_basic_selection(cast(BasicSelection, pure_selection), value, fields=fields)
+            self.set_basic_selection(cast("BasicSelection", pure_selection), value, fields=fields)
 
     @_deprecate_positional_args
     def get_basic_selection(
@@ -3652,7 +3652,7 @@ def update_attributes(self, new_attributes: dict[str, JSON]) -> Array:
         # TODO: remove this cast when type inference improves
         new_array = sync(self._async_array.update_attributes(new_attributes))
         # TODO: remove this cast when type inference improves
-        _new_array = cast(AsyncArray[ArrayV2Metadata] | AsyncArray[ArrayV3Metadata], new_array)
+        _new_array = cast("AsyncArray[ArrayV2Metadata] | AsyncArray[ArrayV3Metadata]", new_array)
         return type(self)(_new_array)
 
     def __repr__(self) -> str:
@@ -4238,7 +4238,7 @@ async def init_array(
             serializer=serializer,
             dtype=dtype_parsed,
         )
-        sub_codecs = cast(tuple[Codec, ...], (*array_array, array_bytes, *bytes_bytes))
+        sub_codecs = cast("tuple[Codec, ...]", (*array_array, array_bytes, *bytes_bytes))
         codecs_out: tuple[Codec, ...]
         if shard_shape_parsed is not None:
             index_location = None
@@ -4509,7 +4509,7 @@ def _parse_keep_array_attr(
         compressors = "auto"
     if serializer == "keep":
         if zarr_format == 3 and data.metadata.zarr_format == 3:
-            serializer = cast(SerializerLike, data.serializer)
+            serializer = cast("SerializerLike", data.serializer)
         else:
             serializer = "auto"
     if fill_value is None:
@@ -4687,7 +4687,7 @@ def _parse_chunk_encoding_v3(
     if isinstance(filters, dict | Codec):
         maybe_array_array = (filters,)
     else:
-        maybe_array_array = cast(Iterable[Codec | dict[str, JSON]], filters)
+        maybe_array_array = cast("Iterable[Codec | dict[str, JSON]]", filters)
     out_array_array = tuple(_parse_array_array_codec(c) for c in maybe_array_array)
 
     if serializer == "auto":
@@ -4704,7 +4704,7 @@ def _parse_chunk_encoding_v3(
     if isinstance(compressors, dict | Codec):
         maybe_bytes_bytes = (compressors,)
     else:
-        maybe_bytes_bytes = cast(Iterable[Codec | dict[str, JSON]], compressors)
+        maybe_bytes_bytes = cast("Iterable[Codec | dict[str, JSON]]", compressors)
 
     out_bytes_bytes = tuple(_parse_bytes_bytes_codec(c) for c in maybe_bytes_bytes)
diff --git a/src/zarr/core/array_spec.py b/src/zarr/core/array_spec.py
index 59d3cc6b40..6cd27b30eb 100644
--- a/src/zarr/core/array_spec.py
+++ b/src/zarr/core/array_spec.py
@@ -64,7 +64,7 @@ def from_dict(cls, data: ArrayConfigParams) -> Self:
         """
         kwargs_out: ArrayConfigParams = {}
         for f in fields(ArrayConfig):
-            field_name = cast(Literal["order", "write_empty_chunks"], f.name)
+            field_name = cast("Literal['order', 'write_empty_chunks']", f.name)
             if field_name not in data:
                 kwargs_out[field_name] = zarr_config.get(f"array.{field_name}")
             else:
diff --git a/src/zarr/core/buffer/core.py b/src/zarr/core/buffer/core.py
index cfcd7e6633..e86f79890e 100644
--- a/src/zarr/core/buffer/core.py
+++ b/src/zarr/core/buffer/core.py
@@ -159,7 +159,7 @@ def create_zero_length(cls) -> Self:
         if cls is Buffer:
             raise NotImplementedError("Cannot call abstract method on the abstract class 'Buffer'")
         return cls(
-            cast(ArrayLike, None)
+ cast("ArrayLike", None) ) # This line will never be reached, but it satisfies the type checker @classmethod @@ -207,7 +207,7 @@ def from_buffer(cls, buffer: Buffer) -> Self: if cls is Buffer: raise NotImplementedError("Cannot call abstract method on the abstract class 'Buffer'") return cls( - cast(ArrayLike, None) + cast("ArrayLike", None) ) # This line will never be reached, but it satisfies the type checker @classmethod @@ -227,7 +227,7 @@ def from_bytes(cls, bytes_like: BytesLike) -> Self: if cls is Buffer: raise NotImplementedError("Cannot call abstract method on the abstract class 'Buffer'") return cls( - cast(ArrayLike, None) + cast("ArrayLike", None) ) # This line will never be reached, but it satisfies the type checker def as_array_like(self) -> ArrayLike: @@ -358,7 +358,7 @@ def create( "Cannot call abstract method on the abstract class 'NDBuffer'" ) return cls( - cast(NDArrayLike, None) + cast("NDArrayLike", None) ) # This line will never be reached, but it satisfies the type checker @classmethod @@ -395,7 +395,7 @@ def from_numpy_array(cls, array_like: npt.ArrayLike) -> Self: "Cannot call abstract method on the abstract class 'NDBuffer'" ) return cls( - cast(NDArrayLike, None) + cast("NDArrayLike", None) ) # This line will never be reached, but it satisfies the type checker def as_ndarray_like(self) -> NDArrayLike: @@ -427,7 +427,7 @@ def as_scalar(self) -> ScalarType: """Returns the buffer as a scalar value""" if self._data.size != 1: raise ValueError("Buffer does not contain a single scalar value") - return cast(ScalarType, self.as_numpy_array()[()]) + return cast("ScalarType", self.as_numpy_array()[()]) @property def dtype(self) -> np.dtype[Any]: diff --git a/src/zarr/core/buffer/gpu.py b/src/zarr/core/buffer/gpu.py index 77d2731c71..88746c5fac 100644 --- a/src/zarr/core/buffer/gpu.py +++ b/src/zarr/core/buffer/gpu.py @@ -103,7 +103,7 @@ def from_bytes(cls, bytes_like: BytesLike) -> Self: return cls.from_array_like(cp.frombuffer(bytes_like, dtype="B")) def as_numpy_array(self) -> npt.NDArray[Any]: - return cast(npt.NDArray[Any], cp.asnumpy(self._data)) + return cast("npt.NDArray[Any]", cp.asnumpy(self._data)) def __add__(self, other: core.Buffer) -> Self: other_array = other.as_array_like() @@ -204,7 +204,7 @@ def as_numpy_array(self) -> npt.NDArray[Any]: ------- NumPy array of this buffer (might be a data copy) """ - return cast(npt.NDArray[Any], cp.asnumpy(self._data)) + return cast("npt.NDArray[Any]", cp.asnumpy(self._data)) def __getitem__(self, key: Any) -> Self: return self.__class__(self._data.__getitem__(key)) diff --git a/src/zarr/core/chunk_key_encodings.py b/src/zarr/core/chunk_key_encodings.py index 103472c3b4..91dfc90365 100644 --- a/src/zarr/core/chunk_key_encodings.py +++ b/src/zarr/core/chunk_key_encodings.py @@ -20,7 +20,7 @@ def parse_separator(data: JSON) -> SeparatorLiteral: if data not in (".", "/"): raise ValueError(f"Expected an '.' or '/' separator. Got {data} instead.") - return cast(SeparatorLiteral, data) + return cast("SeparatorLiteral", data) class ChunkKeyEncodingParams(TypedDict): @@ -48,7 +48,7 @@ def from_dict(cls, data: dict[str, JSON] | ChunkKeyEncodingLike) -> ChunkKeyEnco data = {"name": data["name"], "configuration": {"separator": data["separator"]}} # TODO: remove this cast when we are statically typing the JSON metadata completely. 
-        data = cast(dict[str, JSON], data)
+        data = cast("dict[str, JSON]", data)
 
         # configuration is optional for chunk key encodings
         name_parsed, config_parsed = parse_named_configuration(data, require_configuration=False)
diff --git a/src/zarr/core/common.py b/src/zarr/core/common.py
index 3308ca3247..3e5b619266 100644
--- a/src/zarr/core/common.py
+++ b/src/zarr/core/common.py
@@ -157,7 +157,7 @@ def parse_fill_value(data: Any) -> Any:
 
 def parse_order(data: Any) -> Literal["C", "F"]:
     if data in ("C", "F"):
-        return cast(Literal["C", "F"], data)
+        return cast("Literal['C', 'F']", data)
     raise ValueError(f"Expected one of ('C', 'F'), got {data} instead.")
 
 
@@ -201,4 +201,4 @@ def _warn_order_kwarg() -> None:
 
 def _default_zarr_format() -> ZarrFormat:
     """Return the default zarr_version"""
-    return cast(ZarrFormat, int(zarr_config.get("default_zarr_format", 3)))
+    return cast("ZarrFormat", int(zarr_config.get("default_zarr_format", 3)))
diff --git a/src/zarr/core/config.py b/src/zarr/core/config.py
index c565cb0708..2a10943d80 100644
--- a/src/zarr/core/config.py
+++ b/src/zarr/core/config.py
@@ -134,6 +134,6 @@ def enable_gpu(self) -> ConfigSet:
 
 def parse_indexing_order(data: Any) -> Literal["C", "F"]:
     if data in ("C", "F"):
-        return cast(Literal["C", "F"], data)
+        return cast("Literal['C', 'F']", data)
     msg = f"Expected one of ('C', 'F'), got {data} instead."
     raise ValueError(msg)
diff --git a/src/zarr/core/group.py b/src/zarr/core/group.py
index 3f4f15b9e9..fee53be8d6 100644
--- a/src/zarr/core/group.py
+++ b/src/zarr/core/group.py
@@ -80,7 +80,7 @@ def parse_zarr_format(data: Any) -> ZarrFormat:
     """Parse the zarr_format field from metadata."""
     if data in (2, 3):
-        return cast(ZarrFormat, data)
+        return cast("ZarrFormat", data)
     msg = f"Invalid zarr_format. Expected one of 2 or 3. Got {data}."
     raise ValueError(msg)
 
 
@@ -88,7 +88,7 @@ def parse_zarr_format(data: Any) -> ZarrFormat:
 def parse_node_type(data: Any) -> NodeType:
     """Parse the node_type field from metadata."""
     if data in ("array", "group"):
-        return cast(Literal["array", "group"], data)
+        return cast("Literal['array', 'group']", data)
     raise MetadataValidationError("node_type", "array or group", data)
 
 
@@ -361,7 +361,7 @@ def to_buffer_dict(self, prototype: BufferPrototype) -> dict[str, Buffer]:
                         # it's an array
                         if isinstance(v.get("fill_value", None), np.void):
                             v["fill_value"] = base64.standard_b64encode(
-                                cast(bytes, v["fill_value"])
+                                cast("bytes", v["fill_value"])
                             ).decode("ascii")
                         else:
                             v = _replace_special_floats(v)
@@ -3245,8 +3245,7 @@ def _ensure_consistent_zarr_format(
         raise ValueError(msg)
 
     return cast(
-        Mapping[str, GroupMetadata | ArrayV2Metadata]
-        | Mapping[str, GroupMetadata | ArrayV3Metadata],
+        "Mapping[str, GroupMetadata | ArrayV2Metadata] | Mapping[str, GroupMetadata | ArrayV3Metadata]",
         data,
     )
diff --git a/src/zarr/core/indexing.py b/src/zarr/core/indexing.py
index 998fe156a1..c11889f7f4 100644
--- a/src/zarr/core/indexing.py
+++ b/src/zarr/core/indexing.py
@@ -466,7 +466,7 @@ def replace_ellipsis(selection: Any, shape: ChunkCoords) -> SelectionNormalized:
     # check selection not too long
     check_selection_length(selection, shape)
 
-    return cast(SelectionNormalized, selection)
+    return cast("SelectionNormalized", selection)
 
 
 def replace_lists(selection: SelectionNormalized) -> SelectionNormalized:
@@ -481,7 +481,7 @@ def replace_lists(selection: SelectionNormalized) -> SelectionNormalized:
 def ensure_tuple(v: Any) -> SelectionNormalized:
     if not isinstance(v, tuple):
         v = (v,)
-    return cast(SelectionNormalized, v)
+    return cast("SelectionNormalized", v)
 
 
 class ChunkProjection(NamedTuple):
@@ -818,7 +818,7 @@ def ix_(selection: Any, shape: ChunkCoords) -> npt.NDArray[np.intp]:
     # now get numpy to convert to a coordinate selection
     selection = np.ix_(*selection)
 
-    return cast(npt.NDArray[np.intp], selection)
+    return cast("npt.NDArray[np.intp]", selection)
 
 
 def oindex(a: npt.NDArray[Any], selection: Selection) -> npt.NDArray[Any]:
@@ -948,7 +948,7 @@ def __getitem__(self, selection: OrthogonalSelection | Array) -> NDArrayLikeOrSc
         new_selection = ensure_tuple(new_selection)
         new_selection = replace_lists(new_selection)
         return self.array.get_orthogonal_selection(
-            cast(OrthogonalSelection, new_selection), fields=fields
+            cast("OrthogonalSelection", new_selection), fields=fields
         )
 
     def __setitem__(self, selection: OrthogonalSelection, value: npt.ArrayLike) -> None:
@@ -956,7 +956,7 @@ def __setitem__(self, selection: OrthogonalSelection, value: npt.ArrayLike) -> N
         new_selection = ensure_tuple(new_selection)
         new_selection = replace_lists(new_selection)
         return self.array.set_orthogonal_selection(
-            cast(OrthogonalSelection, new_selection), value, fields=fields
+            cast("OrthogonalSelection", new_selection), value, fields=fields
         )
 
 
@@ -1050,14 +1050,14 @@ def __getitem__(self, selection: BasicSelection) -> NDArrayLikeOrScalar:
         fields, new_selection = pop_fields(selection)
         new_selection = ensure_tuple(new_selection)
         new_selection = replace_lists(new_selection)
-        return self.array.get_block_selection(cast(BasicSelection, new_selection), fields=fields)
+        return self.array.get_block_selection(cast("BasicSelection", new_selection), fields=fields)
 
     def __setitem__(self, selection: BasicSelection, value: npt.ArrayLike) -> None:
         fields, new_selection = pop_fields(selection)
         new_selection = ensure_tuple(new_selection)
         new_selection = replace_lists(new_selection)
         return self.array.set_block_selection(
-            cast(BasicSelection, new_selection), value, fields=fields
+            cast("BasicSelection", new_selection), value, fields=fields
         )
 
 
@@ -1105,12 +1105,12 @@ def __init__(
         nchunks = reduce(operator.mul, cdata_shape, 1)
 
         # some initial normalization
-        selection_normalized = cast(CoordinateSelectionNormalized, ensure_tuple(selection))
+        selection_normalized = cast("CoordinateSelectionNormalized", ensure_tuple(selection))
         selection_normalized = tuple(
             np.asarray([i]) if is_integer(i) else i for i in selection_normalized
         )
         selection_normalized = cast(
-            CoordinateSelectionNormalized, replace_lists(selection_normalized)
+            "CoordinateSelectionNormalized", replace_lists(selection_normalized)
         )
 
         # validation
@@ -1214,8 +1214,8 @@ def __iter__(self) -> Iterator[ChunkProjection]:
 class MaskIndexer(CoordinateIndexer):
     def __init__(self, selection: MaskSelection, shape: ChunkCoords, chunk_grid: ChunkGrid) -> None:
         # some initial normalization
-        selection_normalized = cast(tuple[MaskSelection], ensure_tuple(selection))
-        selection_normalized = cast(tuple[MaskSelection], replace_lists(selection_normalized))
+        selection_normalized = cast("tuple[MaskSelection]", ensure_tuple(selection))
+        selection_normalized = cast("tuple[MaskSelection]", replace_lists(selection_normalized))
 
         # validation
         if not is_mask_selection(selection_normalized, shape):
@@ -1311,14 +1311,14 @@ def pop_fields(selection: SelectionWithFields) -> tuple[Fields | None, Selection
     elif not isinstance(selection, tuple):
         # single selection item, no fields
         # leave selection as-is
-        return None, cast(Selection, selection)
+        return None, cast("Selection", selection)
     else:
         # multiple items, split fields from selection items
         fields: Fields = [f for f in selection if isinstance(f, str)]
         fields = fields[0] if len(fields) == 1 else fields
         selection_tuple = tuple(s for s in selection if not isinstance(s, str))
         selection = cast(
-            Selection, selection_tuple[0] if len(selection_tuple) == 1 else selection_tuple
+            "Selection", selection_tuple[0] if len(selection_tuple) == 1 else selection_tuple
         )
         return fields, selection
 
@@ -1380,12 +1380,12 @@ def get_indexer(
         new_selection = ensure_tuple(selection)
         new_selection = replace_lists(new_selection)
         if is_coordinate_selection(new_selection, shape):
-            return CoordinateIndexer(cast(CoordinateSelection, selection), shape, chunk_grid)
+            return CoordinateIndexer(cast("CoordinateSelection", selection), shape, chunk_grid)
         elif is_mask_selection(new_selection, shape):
-            return MaskIndexer(cast(MaskSelection, selection), shape, chunk_grid)
+            return MaskIndexer(cast("MaskSelection", selection), shape, chunk_grid)
         else:
             raise VindexInvalidSelectionError(new_selection)
     elif is_pure_orthogonal_indexing(pure_selection, len(shape)):
-        return OrthogonalIndexer(cast(OrthogonalSelection, selection), shape, chunk_grid)
+        return OrthogonalIndexer(cast("OrthogonalSelection", selection), shape, chunk_grid)
     else:
-        return BasicIndexer(cast(BasicSelection, selection), shape, chunk_grid)
+        return BasicIndexer(cast("BasicSelection", selection), shape, chunk_grid)
diff --git a/src/zarr/core/metadata/v2.py b/src/zarr/core/metadata/v2.py
index d19193963f..700bbd7037 100644
--- a/src/zarr/core/metadata/v2.py
+++ b/src/zarr/core/metadata/v2.py
@@ -374,7 +374,7 @@ def _serialize_fill_value(fill_value: Any, dtype: np.dtype[Any]) -> JSON:
         # There's a relationship between dtype and fill_value
         # that mypy isn't aware of. The fact that we have S or V dtype here
         # means we should have a bytes-type fill_value.
-        serialized = base64.standard_b64encode(cast(bytes, fill_value)).decode("ascii")
+        serialized = base64.standard_b64encode(cast("bytes", fill_value)).decode("ascii")
     elif isinstance(fill_value, np.datetime64):
         serialized = np.datetime_as_string(fill_value)
     elif isinstance(fill_value, numbers.Integral):
@@ -444,7 +444,7 @@ def _default_compressor(
     else:
         raise ValueError(f"Unsupported dtype kind {dtype.kind}")
 
-    return cast(dict[str, JSON] | None, default_compressor.get(dtype_key, None))
+    return cast("dict[str, JSON] | None", default_compressor.get(dtype_key, None))
 
 
 def _default_filters(
@@ -466,4 +466,4 @@ def _default_filters(
     else:
         raise ValueError(f"Unsupported dtype kind {dtype.kind}")
 
-    return cast(list[dict[str, JSON]] | None, default_filters.get(dtype_key, None))
+    return cast("list[dict[str, JSON]] | None", default_filters.get(dtype_key, None))
diff --git a/src/zarr/core/metadata/v3.py b/src/zarr/core/metadata/v3.py
index 9154762648..590958a3e3 100644
--- a/src/zarr/core/metadata/v3.py
+++ b/src/zarr/core/metadata/v3.py
@@ -272,7 +272,7 @@ def __init__(
             fill_value = default_fill_value(data_type_parsed)
         # we pass a string here rather than an enum to make mypy happy
         fill_value_parsed = parse_fill_value(
-            fill_value, dtype=cast(ALL_DTYPES, data_type_parsed.value)
+            fill_value, dtype=cast("ALL_DTYPES", data_type_parsed.value)
         )
         attributes_parsed = parse_attributes(attributes)
         codecs_parsed_partial = parse_codecs(codecs)
@@ -523,7 +523,7 @@ def parse_fill_value(
         return np.bytes_(fill_value)
 
     # the rest are numeric types
-    np_dtype = cast(np.dtype[Any], data_type.to_numpy())
+    np_dtype = cast("np.dtype[Any]", data_type.to_numpy())
 
     if isinstance(fill_value, Sequence) and not isinstance(fill_value, str):
         if data_type in (DataType.complex64, DataType.complex128):
@@ -587,7 +587,7 @@ def default_fill_value(dtype: DataType) -> str | bytes | np.generic:
         return b""
     else:
         np_dtype = dtype.to_numpy()
-        np_dtype = cast(np.dtype[Any], np_dtype)
+        np_dtype = cast("np.dtype[Any]", np_dtype)
         return np_dtype.type(0)  # type: ignore[misc]
diff --git a/src/zarr/core/strings.py b/src/zarr/core/strings.py
index ffca0c3b0c..15c5fddfee 100644
--- a/src/zarr/core/strings.py
+++ b/src/zarr/core/strings.py
@@ -30,7 +30,7 @@ def cast_array(
         data: np.ndarray[Any, np.dtype[Any]],
     ) -> np.ndarray[Any, np.dtypes.StringDType | np.dtypes.ObjectDType]:
         out = data.astype(_STRING_DTYPE, copy=False)
-        return cast(np.ndarray[Any, np.dtypes.StringDType], out)
+        return cast("np.ndarray[Any, np.dtypes.StringDType]", out)
 
 except AttributeError:
     # if not available, we fall back on an object array of strings, as in Zarr < 3
@@ -41,7 +41,7 @@ def cast_array(
         data: np.ndarray[Any, np.dtype[Any]],
     ) -> np.ndarray[Any, Union["np.dtypes.StringDType", "np.dtypes.ObjectDType"]]:
         out = data.astype(_STRING_DTYPE, copy=False)
-        return cast(np.ndarray[Any, np.dtypes.ObjectDType], out)
+        return cast("np.ndarray[Any, np.dtypes.ObjectDType]", out)
 
 
 def cast_to_string_dtype(
diff --git a/src/zarr/testing/utils.py b/src/zarr/testing/utils.py
index 0a93b93fdb..8a2f9d78a2 100644
--- a/src/zarr/testing/utils.py
+++ b/src/zarr/testing/utils.py
@@ -31,7 +31,7 @@ def has_cupy() -> bool:
     try:
         import cupy
 
-        return cast(bool, cupy.cuda.runtime.getDeviceCount() > 0)
+        return cast("bool", cupy.cuda.runtime.getDeviceCount() > 0)
     except ImportError:
         return False
     except cupy.cuda.runtime.CUDARuntimeError:
@@ -44,7 +44,7 @@ def has_cupy() -> bool:
 # Decorator for GPU tests
 def gpu_test(func: T_Callable) -> T_Callable:
     return cast(
-        T_Callable,
+        "T_Callable",
         pytest.mark.gpu(
             pytest.mark.skipif(not has_cupy(), reason="CuPy not installed or no GPU available")(
                 func
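
Note: apart from the two pre-commit hook bumps, every hunk above makes the same mechanical change — quoting the first argument of typing.cast(). This is consistent with the autofix for ruff's TC006 (runtime-cast-value) rule, applied here after the bump to ruff v0.11.8. Unlike annotations, the type argument of cast() is an ordinary function argument, so `from __future__ import annotations` does not defer its evaluation; quoting is the only way to keep the type expression (and any imports it needs) out of the runtime path. A minimal sketch of the pattern, using illustrative names that are not from zarr:

    from typing import TYPE_CHECKING, cast

    if TYPE_CHECKING:
        # Only the type checker needs this import; with the quoted cast
        # below, it is never required at runtime.
        from collections.abc import Iterable

    def as_int_tuple(data: object) -> tuple[int, ...]:
        # cast() is a runtime no-op that returns `data` unchanged. Unquoted,
        # the expression Iterable[int] would be evaluated on every call;
        # quoted, it stays a plain string that only the type checker reads.
        return tuple(cast("Iterable[int]", data))

Runtime behavior is identical either way — cast() returns its second argument untouched — which is why the change is safe to apply wholesale across the codebase.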