Merge branch 'main' into feature/append-v3
jhamman authored Oct 23, 2024
2 parents 9095e04 + 8a33df7 commit 1ef58f8
Showing 17 changed files with 248 additions and 67 deletions.
32 changes: 32 additions & 0 deletions .github/workflows/test.yml
@@ -43,3 +43,35 @@ jobs:
      - name: Run Tests
        run: |
          hatch env run --env test.py${{ matrix.python-version }}-${{ matrix.numpy-version }}-${{ matrix.dependency-set }} run
  test-upstream-and-min-deps:
    name: py=${{ matrix.python-version }}-${{ matrix.dependency-set }}

    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: ['3.11', "3.13"]
        dependency-set: ["upstream", "min_deps"]
        exclude:
          - python-version: "3.13"
            dependency-set: min_deps
          - python-version: "3.11"
            dependency-set: upstream
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
          cache: 'pip'
      - name: Install Hatch
        run: |
          python -m pip install --upgrade pip
          pip install hatch
      - name: Set Up Hatch Env
        run: |
          hatch env create ${{ matrix.dependency-set }}
          hatch env run -e ${{ matrix.dependency-set }} list-env
      - name: Run Tests
        run: |
          hatch env run --env ${{ matrix.dependency-set }} run
4 changes: 0 additions & 4 deletions .pep8speaks.yml

This file was deleted.

4 changes: 2 additions & 2 deletions .pre-commit-config.yaml
@@ -7,7 +7,7 @@ default_language_version:
python: python3
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.6.9
rev: v0.7.0
hooks:
- id: ruff
args: ["--fix", "--show-fixes"]
@@ -22,7 +22,7 @@ repos:
hooks:
- id: check-yaml
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v1.11.2
rev: v1.12.1
hooks:
- id: mypy
files: src|tests
2 changes: 1 addition & 1 deletion docs/guide/storage.rst
@@ -72,7 +72,7 @@ that implements the `AbstractFileSystem` API,
.. code-block:: python
>>> import zarr
>>> store = zarr.storage.RemoteStore("gs://foo/bar", mode="r")
>>> store = zarr.storage.RemoteStore.from_url("gs://foo/bar", mode="r")
>>> zarr.open(store=store)
<Array <RemoteStore(GCSFileSystem, foo/bar)> shape=(10, 20) dtype=float32>
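
For readers following the storage docs change above: `from_url` builds the underlying fsspec filesystem from the URL itself, rather than taking a URL in the constructor. A minimal sketch of the same pattern, assuming the bucket allows anonymous access and that `from_url` forwards `storage_options` to fsspec (neither is confirmed by this diff):

```python
import zarr

# Sketch only: the storage_options keyword and anonymous access are
# illustrative assumptions, not taken from this commit.
store = zarr.storage.RemoteStore.from_url(
    "gs://foo/bar", mode="r", storage_options={"anon": True}
)
arr = zarr.open(store=store)
print(arr.shape, arr.dtype)
```
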
72 changes: 66 additions & 6 deletions pyproject.toml
@@ -28,12 +28,13 @@ requires-python = ">=3.11"
dependencies = [
'asciitree',
'numpy>=1.25',
'numcodecs>=0.10.2',
'fsspec>2024',
'crc32c',
'typing_extensions',
'donfig',
'numcodecs>=0.13',
'fsspec>=2022.10.0',
'crc32c>=2.3',
'typing_extensions>=4.6',
'donfig>=0.8',
]

dynamic = [
"version",
]
@@ -98,7 +99,7 @@ extra = [
]
optional = [
'lmdb',
'universal-pathlib',
'universal-pathlib>=0.0.22',
]

[project.urls]
@@ -183,6 +184,65 @@ features = ['docs']
build = "cd docs && make html"
serve = "sphinx-autobuild docs docs/_build --host 0.0.0.0"

[tool.hatch.envs.upstream]
python = "3.13"
dependencies = [
'numpy', # from scientific-python-nightly-wheels
'numcodecs @ git+https://github.com/zarr-developers/numcodecs',
'fsspec @ git+https://github.com/fsspec/filesystem_spec',
's3fs @ git+https://github.com/fsspec/s3fs',
'universal_pathlib @ git+https://github.com/fsspec/universal_pathlib',
'crc32c @ git+https://github.com/ICRAR/crc32c',
'typing_extensions @ git+https://github.com/python/typing_extensions',
'donfig @ git+https://github.com/pytroll/donfig',
# test deps
'hypothesis',
'pytest',
'pytest-cov',
'pytest-asyncio',
'moto[s3]',
]

[tool.hatch.envs.upstream.env-vars]
PIP_INDEX_URL = "https://pypi.anaconda.org/scientific-python-nightly-wheels/simple/"
PIP_EXTRA_INDEX_URL = "https://pypi.org/simple/"
PIP_PRE = "1"

[tool.hatch.envs.upstream.scripts]
run = "pytest --verbose"
run-mypy = "mypy src"
run-hypothesis = "pytest --hypothesis-profile ci tests/test_properties.py tests/test_store/test_stateful*"
list-env = "pip list"

[tool.hatch.envs.min_deps]
description = """Test environment for minimum supported dependencies
See Spec 0000 for details and drop schedule: https://scientific-python.org/specs/spec-0000/
"""
python = "3.11"
dependencies = [
'numpy==1.25.*',
'numcodecs==0.13.*', # 0.13 needed for? (should be 0.11)
'fsspec==2022.10.0',
's3fs==2022.10.0',
'universal_pathlib==0.0.22',
'crc32c==2.3.*',
'typing_extensions==4.6.*', # 4.5 needed for @deprecated, 4.6 for Buffer
'donfig==0.8.*',
# test deps
'hypothesis',
'pytest',
'pytest-cov',
'pytest-asyncio',
'moto[s3]',
]

[tool.hatch.envs.min_deps.scripts]
run = "pytest --verbose"
run-hypothesis = "pytest --hypothesis-profile ci tests/test_properties.py tests/test_store/test_stateful*"
list-env = "pip list"


[tool.ruff]
line-length = 100
force-exclude = true
2 changes: 1 addition & 1 deletion src/zarr/abc/store.py
@@ -168,7 +168,7 @@ def with_mode(self, mode: AccessModeLiteral) -> Self:
Returns
-------
store:
store
A new store of the same type with the new mode.
Examples
10 changes: 3 additions & 7 deletions src/zarr/api/asynchronous.py
@@ -712,7 +712,7 @@ async def create(
dtype: npt.DTypeLike | None = None,
compressor: dict[str, JSON] | None = None, # TODO: default and type change
fill_value: Any | None = 0, # TODO: need type
order: MemoryOrder | None = None, # TODO: default change
order: MemoryOrder | None = None,
store: str | StoreLike | None = None,
synchronizer: Any | None = None,
overwrite: bool = False,
@@ -761,6 +761,7 @@ async def create(
Default value to use for uninitialized portions of the array.
order : {'C', 'F'}, optional
Memory layout to be used within each chunk.
Default is set in Zarr's config (`array.order`).
store : Store or str
Store or path to directory in file system or name of zip file.
synchronizer : object, optional
@@ -834,12 +835,6 @@ async def create(
else:
chunk_shape = shape

if order is not None:
warnings.warn(
"order is deprecated, use config `array.order` instead",
DeprecationWarning,
stacklevel=2,
)
if synchronizer is not None:
warnings.warn("synchronizer is not yet implemented", RuntimeWarning, stacklevel=2)
if chunk_store is not None:
@@ -889,6 +884,7 @@ async def create(
codecs=codecs,
dimension_names=dimension_names,
attributes=attributes,
order=order,
**kwargs,
)

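
Taken together with the `order=order` pass-through above, the user-facing effect is that `order` is honoured again instead of triggering a deprecation warning, and the default now comes from Zarr's `array.order` config key (as the updated docstring notes). A minimal sketch, assuming the synchronous `zarr.create` wrapper forwards the same keyword arguments:

```python
import zarr

# Explicit per-array memory layout (assumed to be forwarded to the async create).
z = zarr.create(shape=(100, 100), chunks=(10, 10), dtype="float32", order="F")

# Or rely on the config default referenced in the updated docstring.
with zarr.config.set({"array.order": "F"}):
    z2 = zarr.create(shape=(100, 100), chunks=(10, 10), dtype="float32")
```
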
7 changes: 4 additions & 3 deletions src/zarr/codecs/zstd.py
@@ -3,10 +3,11 @@
import asyncio
from dataclasses import dataclass
from functools import cached_property
from importlib.metadata import version
from typing import TYPE_CHECKING

import numcodecs
from numcodecs.zstd import Zstd
from packaging.version import Version

from zarr.abc.codec import BytesBytesCodec
from zarr.core.buffer.cpu import as_numpy_array_wrapper
@@ -43,8 +44,8 @@ class ZstdCodec(BytesBytesCodec):

def __init__(self, *, level: int = 0, checksum: bool = False) -> None:
# numcodecs 0.13.0 introduces the checksum attribute for the zstd codec
_numcodecs_version = tuple(map(int, version("numcodecs").split(".")))
if _numcodecs_version < (0, 13, 0): # pragma: no cover
_numcodecs_version = Version(numcodecs.__version__)
if _numcodecs_version < Version("0.13.0"):
raise RuntimeError(
"numcodecs version >= 0.13.0 is required to use the zstd codec. "
f"Version {_numcodecs_version} is currently installed."
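
A note on why the `zstd.py` change swaps `importlib.metadata.version` plus integer parsing for `packaging.version.Version` (my inference, not stated in the diff): dev or nightly builds such as `0.14.0.dev0`, like those installed by the new upstream test environment, break the integer-tuple parse but compare cleanly as `Version` objects:

```python
from packaging.version import Version

# The old approach fails on pre-release/dev version strings:
# tuple(map(int, "0.14.0.dev0".split("."))) raises ValueError on "dev0".
# packaging's Version parses and orders such strings correctly instead.
assert Version("0.14.0.dev0") > Version("0.13.0")
assert Version("0.12.1") < Version("0.13.0")
```
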
35 changes: 17 additions & 18 deletions src/zarr/core/array.py
@@ -33,6 +33,7 @@
ZARRAY_JSON,
ZATTRS_JSON,
ChunkCoords,
MemoryOrder,
ShapeLike,
ZarrFormat,
concurrent_map,
@@ -203,29 +204,29 @@ class AsyncArray(Generic[T_ArrayMetadata]):
metadata: T_ArrayMetadata
store_path: StorePath
codec_pipeline: CodecPipeline = field(init=False)
order: Literal["C", "F"]
order: MemoryOrder

@overload
def __init__(
self: AsyncArray[ArrayV2Metadata],
metadata: ArrayV2Metadata | ArrayV2MetadataDict,
store_path: StorePath,
order: Literal["C", "F"] | None = None,
order: MemoryOrder | None = None,
) -> None: ...

@overload
def __init__(
self: AsyncArray[ArrayV3Metadata],
metadata: ArrayV3Metadata | ArrayV3MetadataDict,
store_path: StorePath,
order: Literal["C", "F"] | None = None,
order: MemoryOrder | None = None,
) -> None: ...

def __init__(
self,
metadata: ArrayMetadata | ArrayMetadataDict,
store_path: StorePath,
order: Literal["C", "F"] | None = None,
order: MemoryOrder | None = None,
) -> None:
if isinstance(metadata, dict):
zarr_format = metadata["zarr_format"]
@@ -261,7 +262,7 @@ async def create(
attributes: dict[str, JSON] | None = None,
chunks: ShapeLike | None = None,
dimension_separator: Literal[".", "/"] | None = None,
order: Literal["C", "F"] | None = None,
order: MemoryOrder | None = None,
filters: list[dict[str, JSON]] | None = None,
compressor: dict[str, JSON] | None = None,
# runtime
@@ -350,7 +351,7 @@
# v2 only
chunks: ShapeLike | None = None,
dimension_separator: Literal[".", "/"] | None = None,
order: Literal["C", "F"] | None = None,
order: MemoryOrder | None = None,
filters: list[dict[str, JSON]] | None = None,
compressor: dict[str, JSON] | None = None,
# runtime
@@ -382,7 +383,7 @@ async def create(
# v2 only
chunks: ShapeLike | None = None,
dimension_separator: Literal[".", "/"] | None = None,
order: Literal["C", "F"] | None = None,
order: MemoryOrder | None = None,
filters: list[dict[str, JSON]] | None = None,
compressor: dict[str, JSON] | None = None,
# runtime
@@ -422,7 +423,6 @@ async def create(
V2 only. V3 arrays cannot have a dimension separator.
order : Literal["C", "F"], optional
The order of the array (default is None).
V2 only. V3 arrays should not have 'order' parameter.
filters : list[dict[str, JSON]], optional
The filters used to compress the data (default is None).
V2 only. V3 arrays should not have 'filters' parameter.
@@ -471,10 +471,6 @@
raise ValueError(
"dimension_separator cannot be used for arrays with version 3. Use chunk_key_encoding instead."
)
if order is not None:
raise ValueError(
"order cannot be used for arrays with version 3. Use a transpose codec instead."
)
if filters is not None:
raise ValueError(
"filters cannot be used for arrays with version 3. Use array-to-array codecs instead."
@@ -494,6 +490,7 @@
dimension_names=dimension_names,
attributes=attributes,
exists_ok=exists_ok,
order=order,
)
elif zarr_format == 2:
if dtype is str or dtype == "str":
@@ -545,6 +542,7 @@ async def _create_v3(
dtype: npt.DTypeLike,
chunk_shape: ChunkCoords,
fill_value: Any | None = None,
order: MemoryOrder | None = None,
chunk_key_encoding: (
ChunkKeyEncoding
| tuple[Literal["default"], Literal[".", "/"]]
Expand Down Expand Up @@ -588,7 +586,7 @@ async def _create_v3(
attributes=attributes or {},
)

array = cls(metadata=metadata, store_path=store_path)
array = cls(metadata=metadata, store_path=store_path, order=order)
await array._save_metadata(metadata, ensure_parents=True)
return array

@@ -602,16 +600,17 @@ async def _create_v2(
chunks: ChunkCoords,
dimension_separator: Literal[".", "/"] | None = None,
fill_value: None | float = None,
order: Literal["C", "F"] | None = None,
order: MemoryOrder | None = None,
filters: list[dict[str, JSON]] | None = None,
compressor: dict[str, JSON] | None = None,
attributes: dict[str, JSON] | None = None,
exists_ok: bool = False,
) -> AsyncArray[ArrayV2Metadata]:
if not exists_ok:
await ensure_no_existing_node(store_path, zarr_format=2)

if order is None:
order = "C"
order = parse_indexing_order(config.get("array.order"))

if dimension_separator is None:
dimension_separator = "."
Expand All @@ -627,7 +626,7 @@ async def _create_v2(
filters=filters,
attributes=attributes,
)
array = cls(metadata=metadata, store_path=store_path)
array = cls(metadata=metadata, store_path=store_path, order=order)
await array._save_metadata(metadata, ensure_parents=True)
return array

@@ -1236,7 +1235,7 @@ def create(
# v2 only
chunks: ChunkCoords | None = None,
dimension_separator: Literal[".", "/"] | None = None,
order: Literal["C", "F"] | None = None,
order: MemoryOrder | None = None,
filters: list[dict[str, JSON]] | None = None,
compressor: dict[str, JSON] | None = None,
# runtime
@@ -1432,7 +1431,7 @@ def store_path(self) -> StorePath:
return self._async_array.store_path

@property
def order(self) -> Literal["C", "F"]:
def order(self) -> MemoryOrder:
return self._async_array.order

@property
(Diffs for the remaining changed files were not loaded.)

0 comments on commit 1ef58f8
