Merge branch 'v3' into fix-prop-test-fillvalue
dcherian authored Sep 25, 2024
2 parents a49bd32 + fafd0bf commit b387820
Showing 606 changed files with 215 additions and 29,318 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/releases.yml
@@ -55,7 +55,7 @@ jobs:
with:
name: releases
path: dist
- uses: pypa/gh-action-pypi-publish@v1.10.1
- uses: pypa/gh-action-pypi-publish@v1.10.2
with:
user: __token__
password: ${{ secrets.pypi_password }}
28 changes: 23 additions & 5 deletions pyproject.toml
@@ -207,18 +207,36 @@ extend-exclude = [

[tool.ruff.lint]
extend-select = [
"B", # flake8-bugbear
"I", # isort
"ISC",
"UP", # pyupgrade
"RSE",
"B", # flake8-bugbear
"I", # isort
"ISC", # flake8-implicit-str-concat
"PGH", # pygrep-hooks
"PYI", # flake8-pyi
"RSE", # flake8-raise
"RUF",
"TCH", # flake8-type-checking
"TRY", # tryceratops
"UP", # pyupgrade
]
ignore = [
"PYI013",
"RUF005",
"TRY003",
# https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules
"W191",
"E111",
"E114",
"E117",
"D206",
"D300",
"Q000",
"Q001",
"Q002",
"Q003",
"COM812",
"COM819",
"ISC001",
"ISC002",
]

[tool.mypy]
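For reference, the newly selected rule families target real bug classes. A minimal illustrative sketch (not code from this repository) of the kind of defect flake8-bugbear's "B" rules catch:

def append_item(item, items=[]):  # B006: mutable default argument
    items.append(item)
    return items

assert append_item(1) == [1]
assert append_item(2) == [1, 2]  # the default list is shared across calls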
10 changes: 8 additions & 2 deletions src/zarr/abc/store.py
@@ -1,6 +1,7 @@
from abc import ABC, abstractmethod
from asyncio import gather
from collections.abc import AsyncGenerator, Iterable
from types import TracebackType
from typing import Any, NamedTuple, Protocol, runtime_checkable

from typing_extensions import Self
@@ -35,7 +36,7 @@ class Store(ABC):
_mode: AccessMode
_is_open: bool

def __init__(self, mode: AccessModeLiteral = "r", *args: Any, **kwargs: Any):
def __init__(self, mode: AccessModeLiteral = "r", *args: Any, **kwargs: Any) -> None:
self._is_open = False
self._mode = AccessMode.from_literal(mode)

@@ -49,7 +50,12 @@ def __enter__(self) -> Self:
"""Enter a context manager that will close the store upon exiting."""
return self

def __exit__(self, *args: Any) -> None:
def __exit__(
self,
exc_type: type[BaseException] | None,
exc_value: BaseException | None,
traceback: TracebackType | None,
) -> None:
"""Close the store."""
self.close()
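The widened __exit__ signature spells out the three arguments the context-manager protocol always passes on exit. A minimal illustrative sketch (hypothetical class, not part of this commit) of the same typed form in use:

from types import TracebackType

class DemoStore:
    """Illustrative only: a context manager with the same typed __exit__."""

    def __enter__(self) -> "DemoStore":
        return self

    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc_value: BaseException | None,
        traceback: TracebackType | None,
    ) -> None:
        print("closed")

with DemoStore() as store:
    pass  # __exit__ runs when the block exits, even on error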

10 changes: 5 additions & 5 deletions src/zarr/api/asynchronous.py
@@ -2,7 +2,7 @@

import asyncio
import warnings
from typing import TYPE_CHECKING, Any, Literal, Union, cast
from typing import TYPE_CHECKING, Any, Literal, cast

import numpy as np
import numpy.typing as npt
@@ -25,6 +25,10 @@
from zarr.core.buffer import NDArrayLike
from zarr.core.chunk_key_encodings import ChunkKeyEncoding

# TODO: this type could use some more thought
ArrayLike = AsyncArray | Array | npt.NDArray[Any]
PathLike = str

__all__ = [
"consolidate_metadata",
"copy",
@@ -53,10 +57,6 @@
"zeros_like",
]

# TODO: this type could use some more thought, noqa to avoid "Variable "asynchronous.ArrayLike" is not valid as a type"
ArrayLike = Union[AsyncArray | Array | npt.NDArray[Any]] # noqa
PathLike = str


def _get_shape_chunks(a: ArrayLike | Any) -> tuple[ChunkCoords | None, ChunkCoords | None]:
"""helper function to get the shape and chunks from an array-like object"""
12 changes: 6 additions & 6 deletions src/zarr/codecs/_v2.py
@@ -8,16 +8,18 @@

from zarr.abc.codec import ArrayArrayCodec, ArrayBytesCodec
from zarr.core.buffer import Buffer, NDBuffer, default_buffer_prototype
from zarr.core.common import JSON, to_thread
from zarr.core.common import to_thread
from zarr.registry import get_ndbuffer_class

if TYPE_CHECKING:
import numcodecs.abc

from zarr.core.array_spec import ArraySpec


@dataclass(frozen=True)
class V2Compressor(ArrayBytesCodec):
compressor: dict[str, JSON] | None
compressor: numcodecs.abc.Codec | None

is_fixed_size = False

@@ -27,9 +29,8 @@ async def _decode_single(
chunk_spec: ArraySpec,
) -> NDBuffer:
if self.compressor is not None:
compressor = numcodecs.get_codec(self.compressor)
chunk_numpy_array = ensure_ndarray(
await to_thread(compressor.decode, chunk_bytes.as_array_like())
await to_thread(self.compressor.decode, chunk_bytes.as_array_like())
)
else:
chunk_numpy_array = ensure_ndarray(chunk_bytes.as_array_like())
@@ -47,14 +48,13 @@ async def _encode_single(
) -> Buffer | None:
chunk_numpy_array = chunk_array.as_numpy_array()
if self.compressor is not None:
compressor = numcodecs.get_codec(self.compressor)
if (
not chunk_numpy_array.flags.c_contiguous
and not chunk_numpy_array.flags.f_contiguous
):
chunk_numpy_array = chunk_numpy_array.copy(order="A")
encoded_chunk_bytes = ensure_bytes(
await to_thread(compressor.encode, chunk_numpy_array)
await to_thread(self.compressor.encode, chunk_numpy_array)
)
else:
encoded_chunk_bytes = ensure_bytes(chunk_numpy_array)
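Holding a resolved numcodecs.abc.Codec on the dataclass means numcodecs.get_codec runs once at construction rather than on every chunk. A minimal illustrative round-trip with a codec resolved the same way (the codec config here is an arbitrary example):

import numcodecs

codec = numcodecs.get_codec({"id": "zlib", "level": 1})  # resolve once

data = b"chunk bytes " * 64
encoded = codec.encode(data)                 # the call _encode_single offloads to a thread
assert bytes(codec.decode(encoded)) == data  # the call _decode_single offloads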
6 changes: 2 additions & 4 deletions src/zarr/codecs/transpose.py
@@ -96,16 +96,14 @@ async def _decode_single(
chunk_spec: ArraySpec,
) -> NDBuffer:
inverse_order = np.argsort(self.order)
chunk_array = chunk_array.transpose(inverse_order)
return chunk_array
return chunk_array.transpose(inverse_order)

async def _encode_single(
self,
chunk_array: NDBuffer,
_chunk_spec: ArraySpec,
) -> NDBuffer | None:
chunk_array = chunk_array.transpose(self.order)
return chunk_array
return chunk_array.transpose(self.order)

def compute_encoded_size(self, input_byte_length: int, _chunk_spec: ArraySpec) -> int:
return input_byte_length
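The decode path relies on np.argsort(order) being the inverse permutation of order; a quick illustrative check:

import numpy as np

order = (2, 0, 1)                    # an example axis permutation
arr = np.arange(24).reshape(2, 3, 4)
encoded = arr.transpose(order)
decoded = encoded.transpose(np.argsort(order))  # inverse permutation
assert (decoded == arr).all()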
7 changes: 3 additions & 4 deletions src/zarr/core/array.py
@@ -110,7 +110,7 @@ def __init__(
metadata: ArrayMetadata,
store_path: StorePath,
order: Literal["C", "F"] | None = None,
):
) -> None:
metadata_parsed = parse_array_metadata(metadata)
order_parsed = parse_indexing_order(order or config.get("array.order"))

@@ -287,7 +287,7 @@ async def _create_v2(
dtype: npt.DTypeLike,
chunks: ChunkCoords,
dimension_separator: Literal[".", "/"] | None = None,
fill_value: None | int | float = None,
fill_value: None | float = None,
order: Literal["C", "F"] | None = None,
filters: list[dict[str, JSON]] | None = None,
compressor: dict[str, JSON] | None = None,
@@ -324,8 +324,7 @@ def from_dict(
data: dict[str, JSON],
) -> AsyncArray:
metadata = parse_array_metadata(data)
async_array = cls(metadata=metadata, store_path=store_path)
return async_array
return cls(metadata=metadata, store_path=store_path)

@classmethod
async def open(
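Dropping int from the fill_value annotation is sound because PEP 484's numeric tower already lets an int satisfy a float annotation. An illustrative sketch (the function name is hypothetical):

def set_fill(fill_value: None | float) -> None:
    ...

set_fill(0)     # accepted by type checkers: int is valid where float is expected
set_fill(0.5)
set_fill(None)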
7 changes: 4 additions & 3 deletions src/zarr/core/array_spec.py
@@ -3,11 +3,11 @@
from dataclasses import dataclass
from typing import TYPE_CHECKING, Any, Literal

import numpy as np

from zarr.core.common import parse_fill_value, parse_order, parse_shapelike

if TYPE_CHECKING:
import numpy as np

from zarr.core.buffer import BufferPrototype
from zarr.core.common import ChunkCoords

@@ -29,11 +29,12 @@ def __init__(
prototype: BufferPrototype,
) -> None:
shape_parsed = parse_shapelike(shape)
dtype_parsed = np.dtype(dtype)
fill_value_parsed = parse_fill_value(fill_value)
order_parsed = parse_order(order)

object.__setattr__(self, "shape", shape_parsed)
object.__setattr__(self, "dtype", dtype)
object.__setattr__(self, "dtype", dtype_parsed)
object.__setattr__(self, "fill_value", fill_value_parsed)
object.__setattr__(self, "order", order_parsed)
object.__setattr__(self, "prototype", prototype)
5 changes: 4 additions & 1 deletion src/zarr/core/attributes.py
@@ -13,7 +13,7 @@


class Attributes(MutableMapping[str, JSON]):
def __init__(self, obj: Array | Group):
def __init__(self, obj: Array | Group) -> None:
# key=".zattrs", read_only=False, cache=True, synchronizer=None
self._obj = obj

@@ -51,3 +51,6 @@ def put(self, d: dict[str, JSON]) -> None:
{'a': 3, 'c': 4}
"""
self._obj = self._obj.update_attributes(d)

def asdict(self) -> dict[str, JSON]:
return dict(self._obj.metadata.attributes)
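The new asdict() hands back a plain-dict copy, so callers can mutate the result without touching the stored metadata. An illustrative sketch of that contract (plain dicts stand in for the metadata):

source = {"units": "m", "scale": 2}  # stands in for metadata.attributes
snapshot = dict(source)              # what asdict() returns
snapshot["scale"] = 3
assert source["scale"] == 2          # the original mapping is untouched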
6 changes: 3 additions & 3 deletions src/zarr/core/buffer/core.py
@@ -93,7 +93,7 @@ def ravel(self, order: Literal["K", "A", "C", "F"] = ...) -> Self: ...

def all(self) -> bool: ...

def __eq__(self, other: Any) -> Self: # type: ignore[explicit-override, override]
def __eq__(self, other: object) -> Self: # type: ignore[explicit-override, override]
"""Element-wise equal
Notes
@@ -136,7 +136,7 @@ class Buffer(ABC):
array-like object that must be 1-dim, contiguous, and byte dtype.
"""

def __init__(self, array_like: ArrayLike):
def __init__(self, array_like: ArrayLike) -> None:
if array_like.ndim != 1:
raise ValueError("array_like: only 1-dim allowed")
if array_like.dtype != np.dtype("b"):
@@ -313,7 +313,7 @@ class NDBuffer:
ndarray-like object that is convertible to a regular Numpy array.
"""

def __init__(self, array: NDArrayLike):
def __init__(self, array: NDArrayLike) -> None:
# assert array.ndim > 0
assert array.dtype != object
self._data = array
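The type: ignore on __eq__ is needed because an element-wise __eq__ returns an array rather than the single bool that object.__eq__ promises; numpy shows the same behaviour:

import numpy as np

a = np.array([1, 2, 3])
print(a == 2)  # prints [False  True False]: an array, not one bool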
4 changes: 2 additions & 2 deletions src/zarr/core/buffer/cpu.py
@@ -45,7 +45,7 @@ class Buffer(core.Buffer):
array-like object that must be 1-dim, contiguous, and byte dtype.
"""

def __init__(self, array_like: ArrayLike):
def __init__(self, array_like: ArrayLike) -> None:
super().__init__(array_like)

@classmethod
@@ -143,7 +143,7 @@ class NDBuffer(core.NDBuffer):
ndarray-like object that is convertible to a regular Numpy array.
"""

def __init__(self, array: NDArrayLike):
def __init__(self, array: NDArrayLike) -> None:
super().__init__(array)

@classmethod
4 changes: 2 additions & 2 deletions src/zarr/core/buffer/gpu.py
@@ -48,7 +48,7 @@ class Buffer(core.Buffer):
array-like object that must be 1-dim, contiguous, and byte dtype.
"""

def __init__(self, array_like: ArrayLike):
def __init__(self, array_like: ArrayLike) -> None:
if cp is None:
raise ImportError(
"Cannot use zarr.buffer.gpu.Buffer without cupy. Please install cupy."
@@ -137,7 +137,7 @@ class NDBuffer(core.NDBuffer):
ndarray-like object that is convertible to a regular Numpy array.
"""

def __init__(self, array: NDArrayLike):
def __init__(self, array: NDArrayLike) -> None:
if cp is None:
raise ImportError(
"Cannot use zarr.buffer.gpu.NDBuffer without cupy. Please install cupy."
12 changes: 6 additions & 6 deletions src/zarr/core/group.py
@@ -54,7 +54,7 @@ def parse_zarr_format(data: Any) -> ZarrFormat:
def parse_attributes(data: Any) -> dict[str, Any]:
if data is None:
return {}
elif isinstance(data, dict) and all(map(lambda v: isinstance(v, str), data.keys())):
elif isinstance(data, dict) and all(isinstance(k, str) for k in data):
return data
msg = f"Expected dict with string keys. Got {type(data)} instead."
raise TypeError(msg)
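The generator expression short-circuits just like the map(lambda ...) form while avoiding a lambda call per key; an illustrative check:

data = {"name": "group1", "zarr_format": 3}
assert all(isinstance(k, str) for k in data)         # new spelling
assert all(map(lambda k: isinstance(k, str), data))  # old spelling, same result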
@@ -104,7 +104,9 @@ def to_buffer_dict(self, prototype: BufferPrototype) -> dict[str, Buffer]:
),
}

def __init__(self, attributes: dict[str, Any] | None = None, zarr_format: ZarrFormat = 3):
def __init__(
self, attributes: dict[str, Any] | None = None, zarr_format: ZarrFormat = 3
) -> None:
attributes_parsed = parse_attributes(attributes)
zarr_format_parsed = parse_zarr_format(zarr_format)

@@ -202,11 +204,10 @@ def from_dict(
store_path: StorePath,
data: dict[str, Any],
) -> AsyncGroup:
group = cls(
return cls(
metadata=GroupMetadata.from_dict(data),
store_path=store_path,
)
return group

async def getitem(
self,
@@ -888,8 +889,7 @@ def members(self, max_depth: int | None = 0) -> tuple[tuple[str, Array | Group],
"""
_members = self._sync_iter(self._async_group.members(max_depth=max_depth))

result = tuple(map(lambda kv: (kv[0], _parse_async_node(kv[1])), _members))
return result
return tuple((kv[0], _parse_async_node(kv[1])) for kv in _members)

def __contains__(self, member: str) -> bool:
return self._sync(self._async_group.contains(member))