14 changes: 11 additions & 3 deletions .github/workflows/ci.yml
@@ -23,7 +23,7 @@ jobs:
      - run: pipx run check-manifest

  test:
-    name: ${{ matrix.platform }} (${{ matrix.python-version }}) [${{ matrix.resolution }}]
+    name: ${{ matrix.platform }} (${{ matrix.python-version }}) [${{ matrix.resolution }}] ${{ matrix.extra && format('with extra {0}', matrix.extra) || '' }}
    runs-on: ${{ matrix.platform }}
    env:
      UV_PRERELEASE: ${{ github.event_name == 'schedule' && 'allow' || 'if-necessary-or-explicit' }}
@@ -45,6 +45,14 @@

          - python-version: "3.12"
            resolution: "lowest-direct"
            platform: ubuntu-latest
+          - python-version: "3.13"
+            resolution: "highest"
+            platform: ubuntu-latest
+            extra: ome-zarr
+          - python-version: "3.13"
+            resolution: "highest"
+            platform: ubuntu-latest
+            extra: ome-zarr-tensorstore

    steps:
      - uses: actions/checkout@v6
@@ -66,7 +74,7 @@

        run: uv run scripts/download_samples.py

      - name: 🧪 Run Tests
-        run: uv run --no-dev --group test coverage run -p -m pytest -v
+        run: uv run --no-dev --group test ${{ matrix.extra && format('--extra {0}', matrix.extra) || '' }} coverage run -p -m pytest -v

      # If something goes wrong with --pre tests, we can open an issue in the repo
      - name: 📝 Report --pre Failures
@@ -85,7 +93,7 @@

      - name: Upload coverage
        uses: actions/upload-artifact@v5
        with:
-          name: covreport-${{ matrix.platform }}-py${{ matrix.python-version }}-${{ matrix.resolution }}
+          name: covreport-${{ matrix.platform }}-py${{ matrix.python-version }}-${{ matrix.resolution }}-${{ matrix.extra || 'noextra' }}
          path: ./.coverage*
          include-hidden-files: true

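The matrix gains two Python 3.13 jobs that exercise the new extras, and the `${{ matrix.extra && format('--extra {0}', matrix.extra) || '' }}` expression appends an `--extra` flag to `uv run` only when a matrix entry defines `extra`. As a minimal sketch of that conditional (the helper name here is hypothetical, purely for illustration):

def build_test_command(extra: str | None = None) -> list[str]:
    """Mirror the CI step: inject --extra only when the matrix entry sets one."""
    cmd = ["uv", "run", "--no-dev", "--group", "test"]
    if extra:  # e.g. "ome-zarr" or "ome-zarr-tensorstore"
        cmd += ["--extra", extra]
    return cmd + ["coverage", "run", "-p", "-m", "pytest", "-v"]

print(" ".join(build_test_command("ome-zarr")))
# uv run --no-dev --group test --extra ome-zarr coverage run -p -m pytest -v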
2 changes: 2 additions & 0 deletions .gitignore
@@ -117,5 +117,7 @@ src/nd2/_version.py
!src/sdk/**/*.a

uv.lock
+output.ome.zarr/
+zarr_output/
_nd2sdk/
pyJOBS
8 changes: 8 additions & 0 deletions pyproject.toml
@@ -38,6 +38,14 @@ legacy = [
    "imagecodecs>=2024.9.22; python_version >= '3.13'",
]
tiff = ["tifffile>=2021.7.2"]
+ome-zarr = [
+    "yaozarrs>=0.1.1; python_version >= '3.10'",
+    "zarr>=3.1; python_version >= '3.11'",
+]
+ome-zarr-tensorstore = [
+    "yaozarrs>=0.1.1; python_version >= '3.10'",
+    "tensorstore>=0.1.45",
+]

[dependency-groups]
test = [
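Both new extras pull in yaozarrs (Python 3.10+) for OME-NGFF metadata; they differ only in the array-writing backend they add: zarr-python 3.1+ (Python 3.11+) or tensorstore, installable as `nd2[ome-zarr]` or `nd2[ome-zarr-tensorstore]`. A sketch of how the documented backend="auto" behaviour can be probed, assuming only that the packages are importable (the actual selection logic lives in nd2/_ome_zarr.py, which is not part of this diff):

import importlib.util

def pick_backend() -> str:
    """Illustrative fallback matching the docstring's description of backend="auto"."""
    if importlib.util.find_spec("tensorstore") is not None:
        return "tensorstore"
    if importlib.util.find_spec("zarr") is not None:
        return "zarr"
    raise ImportError(
        "OME-Zarr export requires the 'ome-zarr' or 'ome-zarr-tensorstore' extra, "
        "e.g. `pip install nd2[ome-zarr]`"
    )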
97 changes: 97 additions & 0 deletions src/nd2/_nd2file.py
@@ -33,6 +33,7 @@
    import xarray as xr
    from ome_types import OME

+    from nd2._ome_zarr import ZarrBackend
    from nd2.jobs.types import JobsDict

    from ._binary import BinaryLayers
@@ -939,6 +940,102 @@ def write_tiff(
            modify_ome=modify_ome,
        )

+    def to_ome_zarr(
+        self,
+        dest: str | PathLike,
+        *,
+        chunk_shape: tuple[int, ...] | Literal["auto"] | None = "auto",
+        shard_shape: tuple[int, ...] | None = None,
+        backend: ZarrBackend = "auto",
+        progress: bool = False,
+        position: int | None = None,
+        force_series: bool = False,
+        version: Literal["0.5"] = "0.5",
+    ) -> Path:
"""Export to an OME-Zarr store.

Creates a Zarr v3 store with OME-NGFF 0.5 compliant metadata.
Uses yaozarrs for metadata generation and either zarr-python or
tensorstore for array writing.

Parameters
----------
dest : str | PathLike
Destination path for the Zarr store. Will be created as a directory.
chunk_shape : tuple[int, ...] | "auto" | None
Shape of chunks for the output array. If "auto" (default), determines
optimal chunking based on data size. If None, uses a single chunk.
shard_shape : tuple[int, ...] | None
Shape of shards for sharded storage. If provided, enables Zarr v3
sharding where each shard contains multiple chunks. Useful for
cloud storage to reduce number of objects.
backend : "zarr" | "tensorstore" | "auto"
Backend library to use for writing arrays.
- "tensorstore": Uses Google's tensorstore library
- "zarr": Uses zarr-python
- "auto": Tries to use tensorstore if installed, otherwise falls back
to zarr-python. Raises ImportError if neither is available.
progress : bool
Whether to display a progress bar during writing.
position : int | None
If the ND2 file contains multiple positions (XY stage positions),
export only this position index. If None, exports all positions
as separate groups within the store.
force_series : bool
If True, use bioformats2raw layout even for single position files.
This creates a store with OME/ directory and series metadata,
with the image in a "0/" subdirectory. Default is False.
version : "0.5"
OME-NGFF specification version to use. Currently only "0.5" is
supported. This parameter is reserved for future use.

Returns
-------
Path
Path to the created Zarr store.

Raises
------
ImportError
If yaozarrs or the required backend library is not installed.
ValueError
If the file contains unsupported data structures or invalid version.

+        Examples
+        --------
+        Basic export:

+        >>> import nd2
+        >>> with nd2.ND2File("experiment.nd2") as f:
+        ...     f.to_ome_zarr("experiment.zarr")

+        Export with specific chunking:

+        >>> with nd2.ND2File("experiment.nd2") as f:
+        ...     f.to_ome_zarr(
+        ...         "experiment.zarr",
+        ...         chunk_shape=(1, 1, 64, 256, 256),
+        ...     )

+        Export using the tensorstore backend:

+        >>> with nd2.ND2File("experiment.nd2") as f:
+        ...     f.to_ome_zarr("experiment.zarr", backend="tensorstore")
"""
from ._ome_zarr import nd2_to_ome_zarr

return nd2_to_ome_zarr(
self,
dest,
chunk_shape=chunk_shape,
shard_shape=shard_shape,
backend=backend,
progress=progress,
position=position,
force_series=force_series,
version=version,
)

    def to_dask(self, wrapper: bool = True, copy: bool = True) -> dask.array.core.Array:
        """Create dask array (delayed reader) representing image.

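For reference, a usage sketch that combines the new parameters and then inspects the result with zarr-python (requires the "ome-zarr" extra). The chunk/shard shapes and the root-level location of the OME metadata are illustrative assumptions; the actual group layout depends on `position` and `force_series` as described in the docstring above:

import nd2
import zarr

with nd2.ND2File("experiment.nd2") as f:
    store_path = f.to_ome_zarr(
        "experiment.zarr",
        position=0,                           # export only the first XY position
        chunk_shape=(1, 1, 64, 256, 256),     # illustrative 5D chunk shape
        shard_shape=(1, 1, 256, 1024, 1024),  # each shard holds 4x4x4 chunks
        backend="auto",
    )

root = zarr.open_group(store_path, mode="r")
print(root.attrs.get("ome", {}).get("multiscales"))  # OME-NGFF 0.5 metadata, if stored at the root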