diff --git a/examples/maplibre/tiles.html b/examples/maplibre/tiles.html
index 03ad013..d043876 100644
--- a/examples/maplibre/tiles.html
+++ b/examples/maplibre/tiles.html
@@ -61,7 +61,7 @@
type: "raster",
// use the url option to point at a TileJSON document describing the raster tile source
// https://docs.mapbox.com/style-spec/reference/sources/
- url: "http://localhost:8080/tiles/WebMercatorQuad/tilejson.json?variables=gust&width=512&height=512&style=raster/viridis&colorscalerange=0,30",
+ url: "http://localhost:8080/tiles/WebMercatorQuad/tilejson.json?variables=foo&width=512&height=512&style=raster/viridis&colorscalerange=-1,1",
tileSize: 512,
});
map.addLayer(
diff --git a/pyproject.toml b/pyproject.toml
index 8da3bd8..62d9801 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -88,6 +88,7 @@ dev = [
"xpublish-tiles[testing]",
"coiled>=1.118.3",
"memray>=1.18.0",
+ "pre-commit>=4.5.1",
]
[project.entry-points."xpublish.plugin"]
diff --git a/src/xpublish_tiles/pipeline.py b/src/xpublish_tiles/pipeline.py
index 7e1c0f8..d7e6d89 100644
--- a/src/xpublish_tiles/pipeline.py
+++ b/src/xpublish_tiles/pipeline.py
@@ -12,6 +12,7 @@
import xarray as xr
from xpublish_tiles.config import config
from xpublish_tiles.grids import (
+ Curvilinear,
GridMetadata,
GridSystem,
GridSystem2D,
@@ -426,13 +427,22 @@ def coarsen(
# a discontinuity at the anti-meridian; which we end up averaging over below.
# So fix that here.
if grid.lon_spans_globe:
- newX = fix_coordinate_discontinuities(
- da[grid.X].data,
- # FIXME: test 0->360 also!
- transformer_from_crs(grid.crs, grid.crs),
- axis=da[grid.X].get_axis_num(grid.Xdim),
- bbox=grid.bbox,
- )
+            newX = da[grid.X].data  # the fixes below chain, so a Y-dim fix preserves an X-dim fix
+            if grid.Xdim in coarsen_factors:
+                newX = fix_coordinate_discontinuities(
+                    newX,
+                    # FIXME: test 0->360 also!
+                    transformer_from_crs(grid.crs, grid.crs),
+                    axis=da[grid.X].get_axis_num(grid.Xdim),
+                    bbox=grid.bbox,
+                )
+            if grid.Ydim in coarsen_factors:
+                newX = fix_coordinate_discontinuities(
+                    newX,
+                    transformer_from_crs(grid.crs, grid.crs),
+                    axis=da[grid.X].get_axis_num(grid.Ydim),
+                    bbox=grid.bbox,
+                )
da = da.assign_coords({grid.X: da[grid.X].copy(data=newX)})
with NUMBA_THREADING_LOCK:
coarsened = da.coarsen(coarsen_factors, boundary="pad").mean() # type: ignore[unresolved-attribute]
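
The guard above exists because naively averaging longitudes that straddle the anti-meridian yields values near 0° instead of ±180°. A minimal NumPy sketch of the unwrap-then-average idea (illustrative only, not the actual fix_coordinate_discontinuities implementation; the period argument needs NumPy >= 1.21):

import numpy as np

# Adjacent cells at 179E and 179W: their true midpoint is the anti-meridian,
# but the raw mean lands on the prime meridian instead.
lons = np.array([179.0, -179.0, -177.0, -175.0])
lons.reshape(2, 2).mean(axis=1)                # array([   0., -176.])  <- 0.0 is wrong

# Unwrapping removes the ~358 degree jump so neighbouring values stay contiguous;
# the coarsened result can then be wrapped back into [-180, 180).
unwrapped = np.unwrap(lons, period=360)        # array([179., 181., 183., 185.])
coarse = unwrapped.reshape(2, 2).mean(axis=1)  # array([180., 184.])
((coarse + 180.0) % 360.0) - 180.0             # array([-180., -176.])
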
@@ -854,6 +864,7 @@ def subset_to_bbox(
left=bbox.west, right=bbox.east, top=bbox.north, bottom=bbox.south
)
if grid.crs.is_geographic:
+ # Handle antimeridian crossing: west > east means bbox crosses -180/180
west = west - 360 if west > east else west
input_bbox = BBox(west=west, south=south, east=east, north=north)
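
Worked example of the normalization above, with illustrative values: a request spanning 170°E to 170°W arrives with west > east, and shifting west by 360° makes the range monotonic again.

west, east = 170.0, -170.0   # bbox crossing the -180/180 seam
if west > east:
    west -= 360.0            # now west=-190.0, east=-170.0: a contiguous 20-degree span
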
@@ -889,11 +900,21 @@ def subset_to_bbox(
)
)
+ has_discontinuity_x = False
+ has_discontinuity_y = False
if grid.crs.is_geographic:
if isinstance(grid, GridSystem2D):
- has_discontinuity = has_coordinate_discontinuity(
+ # Check for discontinuities along X dimension
+ has_discontinuity_x = has_coordinate_discontinuity(
subset[grid.X].data, axis=subset[grid.X].get_axis_num(grid.Xdim)
)
+ # For curvilinear grids (2D coordinates), also check along Y dimension
+ # (e.g., HYCOM grids can have discontinuities along Y at certain X indices)
+ if isinstance(grid, Curvilinear):
+ has_discontinuity_y = has_coordinate_discontinuity(
+ subset[grid.X].data, axis=subset[grid.X].get_axis_num(grid.Ydim)
+ )
+ has_discontinuity = has_discontinuity_x or has_discontinuity_y
elif isinstance(grid, Triangular):
anti = next(iter(slicers[grid.Xdim])).antimeridian_vertices
has_discontinuity = anti["pos"].size > 0 or anti["neg"].size > 0
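
A sketch of what the per-dimension checks above look for, assuming a simple jump-size threshold (the real has_coordinate_discontinuity may use different criteria):

import numpy as np

def looks_discontinuous(lon2d: np.ndarray, axis: int, threshold: float = 180.0) -> bool:
    # A jump much larger than the grid spacing between neighbouring cells along
    # `axis` signals an anti-meridian seam in the 2D longitude array.
    return bool((np.abs(np.diff(lon2d, axis=axis)) > threshold).any())

lon2d = np.array([[170.0, 179.0, -172.0],
                  [171.0, 180.0, -171.0]])
looks_discontinuous(lon2d, axis=1)  # True: 179 -> -172 jumps by 351 degrees along X
looks_discontinuous(lon2d, axis=0)  # False: columns vary smoothly along Y
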
@@ -913,13 +934,25 @@ def subset_to_bbox(
# Fix coordinate discontinuities in transformed coordinates if detected
if has_discontinuity:
if isinstance(grid, GridSystem2D):
- fixed = fix_coordinate_discontinuities(
- newX.data,
- input_to_output,
- axis=newX.get_axis_num(grid.Xdim),
- bbox=bbox,
- )
- newX = newX.copy(data=fixed)
+ # Fix discontinuities along X dimension
+ if has_discontinuity_x:
+ fixed = fix_coordinate_discontinuities(
+ newX.data,
+ input_to_output,
+ axis=newX.get_axis_num(grid.Xdim),
+ bbox=bbox,
+ )
+ newX = newX.copy(data=fixed)
+
+ # For curvilinear grids, also fix discontinuities along Y dimension
+ if has_discontinuity_y and isinstance(grid, Curvilinear):
+ fixed = fix_coordinate_discontinuities(
+ newX.data,
+ input_to_output,
+ axis=newX.get_axis_num(grid.Ydim),
+ bbox=bbox,
+ )
+ newX = newX.copy(data=fixed)
elif isinstance(grid, Triangular):
anti = next(iter(slicers[grid.dim])).antimeridian_vertices
for verts in [anti["pos"], anti["neg"]]:
diff --git a/src/xpublish_tiles/testing/datasets.py b/src/xpublish_tiles/testing/datasets.py
index ef1ed6d..7773682 100644
--- a/src/xpublish_tiles/testing/datasets.py
+++ b/src/xpublish_tiles/testing/datasets.py
@@ -1059,6 +1059,114 @@ def global_nans_grid(
)
+def _create_curvilinear_grid_like_hycom(
+ *,
+ regional_subset: bool,
+ dims: tuple[Dim, ...],
+ dtype: npt.DTypeLike,
+ attrs: dict[str, Any],
+) -> xr.Dataset:
+    """Build a HYCOM-like curvilinear grid matching actual HYCOM/RTOFS dimensions.
+
+    Creates a simplified curvilinear grid with:
+    - full HYCOM dimensions: 4500 (X/lon) x 3298 (Y/lat)
+    - latitude range: -80° to 90°; longitude -180° to 180° with wraparound at the antimeridian
+    - 2D latitude/longitude coordinate arrays (curvilinear)
+    - when ``regional_subset`` is True, masked to 180°W-120°W, 0°N-80°N and trimmed with ``drop=True``
+    """
+ ds = uniform_grid(dims=dims, dtype=dtype, attrs=attrs)
+
+ ny, nx = ds.sizes["Y"], ds.sizes["X"]
+
+ lat_1d = np.linspace(-80.0, 90.0, ny, dtype=np.float32)
+ lon_1d = np.linspace(-180.0, 180.0, nx, dtype=np.float32)
+
+ lon_2d, lat_2d = np.meshgrid(lon_1d, lat_1d)
+
+ lat_variation = 0.1 * np.sin(2 * np.pi * np.arange(nx) / nx)
+ lat = lat_2d + lat_variation[np.newaxis, :]
+
+ lon_variation = 0.5 * np.sin(2 * np.pi * np.arange(ny) / ny)
+ lon = lon_2d + lon_variation[:, np.newaxis]
+
+ lon = ((lon + 180.0) % 360.0) - 180.0
+
+ ds["foo"] = ds["foo"].chunk(X=1000, Y=1000)
+ ds.coords["latitude"] = (
+ ["Y", "X"],
+ lat.astype(np.float32),
+ {"standard_name": "latitude", "units": "degrees_north"},
+ )
+ ds.coords["longitude"] = (
+ ["Y", "X"],
+ lon.astype(np.float32),
+ {
+ "standard_name": "longitude",
+ "units": "degrees_east",
+ "modulo": "360 degrees",
+ },
+ )
+ if regional_subset:
+ mask = (
+ (ds.longitude > -180)
+ & (ds.longitude < -120)
+ & (ds.latitude > 0)
+ & (ds.latitude < 80)
+ ).compute()
+ ds.attrs["bbox"] = BBox(west=-180.0, south=0.0, east=-120.0, north=80.0)
+ return ds.where(mask, drop=True)
+ else:
+ ds.attrs["bbox"] = BBox(west=-180.0, south=-80.0, east=180.0, north=90.0)
+ return ds
+
+
+GLOBAL_HYCOM = Dataset(
+ name="global_hycom",
+ setup=partial(_create_curvilinear_grid_like_hycom, regional_subset=False),
+ dims=(
+ Dim(
+ name="X",
+ size=4500,
+ chunk_size=4500,
+ data=np.arange(4500),
+ attrs={
+ "standard_name": "projection_x_coordinate",
+ "axis": "X",
+ "point_spacing": "even",
+ },
+ ),
+ Dim(
+ name="Y",
+ size=3298,
+ chunk_size=3298,
+ data=np.arange(3298),
+ attrs={
+ "standard_name": "projection_y_coordinate",
+ "axis": "Y",
+ "point_spacing": "even",
+ },
+ ),
+ ),
+ dtype=np.float64,
+ attrs={
+ "valid_min": 5.0,
+ "valid_max": 15.0,
+ "coordinates": "latitude longitude",
+ },
+)
+
+REGIONAL_HYCOM = Dataset(
+ name="regional_hycom",
+ setup=partial(_create_curvilinear_grid_like_hycom, regional_subset=True),
+ dims=GLOBAL_HYCOM.dims, # gets subset later
+ dtype=np.float64,
+ attrs={
+ "valid_min": 5.0,
+ "valid_max": 15.0,
+ "coordinates": "latitude longitude",
+ },
+)
+
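
A quick sanity check of what the new fixtures produce (a sketch; create() is the same Dataset helper the tests below use):

from xpublish_tiles.testing.datasets import GLOBAL_HYCOM, REGIONAL_HYCOM

ds = REGIONAL_HYCOM.create()
ds.longitude.dims      # ("Y", "X") -- 2D curvilinear coordinates
ds.latitude.dims       # ("Y", "X")
ds.attrs.get("bbox")   # BBox(west=-180.0, south=0.0, east=-120.0, north=80.0), set by the builder

global_ds = GLOBAL_HYCOM.create()
(global_ds.sizes["X"], global_ds.sizes["Y"])   # (4500, 3298): full HYCOM resolution
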
POPDS = xr.Dataset(
{
"TEMP": (
@@ -1295,6 +1403,8 @@ def create_n320(
"utm33s_hires": UTM33S_HIRES,
"utm50s_hires": UTM50S_HIRES,
"curvilinear": CURVILINEAR,
+ "regional_hycom": REGIONAL_HYCOM,
+ "global_hycom": GLOBAL_HYCOM,
"hrrr_multiple": HRRR_MULTIPLE,
"global_nans": GLOBAL_NANS,
"redgauss_n320": REDGAUSS_N320,
diff --git a/src/xpublish_tiles/testing/tiles.py b/src/xpublish_tiles/testing/tiles.py
index 7b71c11..c201c29 100644
--- a/src/xpublish_tiles/testing/tiles.py
+++ b/src/xpublish_tiles/testing/tiles.py
@@ -304,6 +304,10 @@
(Tile(x=442, y=744, z=11), WEBMERC_TMS),
id="curvilinear_central_us_z11(11/442/744)",
),
+ # TODO: uncomment
+ # pytest.param(
+ # (Tile(x=3, y=5, z=4), WEBMERC_TMS), id="curvilinear_hycom_east_z4(4/3/5)"
+ # ),
]
# South America benchmark tiles (for Sentinel dataset)
diff --git a/tests/__snapshots__/test_pipeline/test_hycom_like_grid[tile0].png b/tests/__snapshots__/test_pipeline/test_hycom_like_grid[tile0].png
new file mode 100644
index 0000000..2d5c1a3
Binary files /dev/null and b/tests/__snapshots__/test_pipeline/test_hycom_like_grid[tile0].png differ
diff --git a/tests/__snapshots__/test_pipeline/test_hycom_like_grid[tile1].png b/tests/__snapshots__/test_pipeline/test_hycom_like_grid[tile1].png
new file mode 100644
index 0000000..6bd54c4
Binary files /dev/null and b/tests/__snapshots__/test_pipeline/test_hycom_like_grid[tile1].png differ
diff --git a/tests/__snapshots__/test_pipeline/test_hycom_like_grid[tile2].png b/tests/__snapshots__/test_pipeline/test_hycom_like_grid[tile2].png
new file mode 100644
index 0000000..dcfef4d
Binary files /dev/null and b/tests/__snapshots__/test_pipeline/test_hycom_like_grid[tile2].png differ
diff --git a/tests/__snapshots__/test_pipeline/test_hycom_like_grid[tile3].png b/tests/__snapshots__/test_pipeline/test_hycom_like_grid[tile3].png
new file mode 100644
index 0000000..428f2a8
Binary files /dev/null and b/tests/__snapshots__/test_pipeline/test_hycom_like_grid[tile3].png differ
diff --git a/tests/conftest.py b/tests/conftest.py
index ede8aa1..f2b3603 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -10,6 +10,7 @@
CURVILINEAR,
EU3035,
EU3035_HIRES,
+ GLOBAL_HYCOM,
HRRR,
REDGAUSS_N320,
UTM33S,
@@ -119,6 +120,8 @@ def repo(pytestconfig):
@pytest.fixture(
params=tuple(map(",".join, product(["-90->90", "90->-90"], ["-180->180", "0->360"])))
+ ("reduced_gaussian_n320",)
+ # TODO: uncomment later
+ # + ("global_hycom",)
)
def global_datasets(request):
param = request.param
@@ -129,6 +132,8 @@ def global_datasets(request):
if param == "reduced_gaussian_n320":
ds = REDGAUSS_N320.create()
+ elif param in {"global_hycom"}:
+ ds = GLOBAL_HYCOM.create()
else:
ds = create_global_dataset(lat_ascending=lat_ascending, lon_0_360=lon_0_360)
ds.attrs["name"] = param
diff --git a/tests/test_grids.py b/tests/test_grids.py
index 90e252b..a610194 100644
--- a/tests/test_grids.py
+++ b/tests/test_grids.py
@@ -57,6 +57,7 @@
PARA_HIRES,
POPDS,
REDGAUSS_N320,
+ REGIONAL_HYCOM,
UTM33S_HIRES,
UTM50S_HIRES,
Dataset,
@@ -176,6 +177,32 @@
),
id="roms",
),
+ pytest.param(
+ REGIONAL_HYCOM.create(),
+ "foo",
+ Curvilinear(
+ crs=CRS.from_user_input(4326),
+ bbox=BBox(
+ south=0,
+ north=80,
+ east=-120,
+ west=-180,
+ ),
+ X="longitude",
+ Y="latitude",
+ Xdim="X",
+ Ydim="Y",
+ indexes=(
+ CurvilinearCellIndex(
+ X=REGIONAL_HYCOM.create().longitude,
+ Y=REGIONAL_HYCOM.create().latitude,
+ Xdim="X",
+ Ydim="Y",
+ ),
+ ),
+ ),
+ id="hycom",
+ ),
pytest.param(
POPDS,
"UVEL",
@@ -374,9 +401,11 @@ async def test_subset(global_datasets, tile, tms):
slicer = next(iter(slicers["point"]))
assert isinstance(slicer, UgridIndexer)
else:
- assert isinstance(slicers["latitude"], list)
- assert isinstance(slicers["longitude"], list)
- assert len(slicers["latitude"]) == 1 # Y dimension should always have one slice
+        y_slicers = slicers.get("latitude", slicers.get("Y"))
+        x_slicers = slicers.get("longitude", slicers.get("X"))
+        assert isinstance(y_slicers, list)
+        assert isinstance(x_slicers, list)
+        assert len(y_slicers) == 1  # Y dimension should always have one slice
# Check that coordinates are within expected bounds (exact matching with controlled grid)
actual = await apply_slicers(
@@ -760,6 +789,17 @@ def _create_test_dataset(
coords={"x": np.arange(array_size), "y": np.arange(array_size)},
)
grid = Curvilinear.from_dataset(ds, CRS.from_epsg(4326), "lon", "lat")
+ elif grid_type == "curvilinear_hycom":
+ lon, lat = np.meshgrid(lon_coords, lat_coords)
+ ds = xr.Dataset(
+ {
+ "temp": (["y", "x"], data),
+ "lon": (["y", "x"], lon),
+ "lat": (["y", "x"], lat),
+ },
+ coords={"x": np.arange(array_size), "y": np.arange(array_size)},
+ )
+ grid = Curvilinear.from_dataset(ds, CRS.from_epsg(4326), "lon", "lat")
elif grid_type == "raster_affine":
pixel_size_x = (
(lon_coords[-1] - lon_coords[0]) / (array_size - 1) if array_size > 1 else 1.0
@@ -831,7 +871,9 @@ def test_min_max_zoom_relationship(self, tms_id):
@pytest.mark.parametrize(
"tms_id", ["WebMercatorQuad", "WGS1984Quad", "WorldCRS84Quad"]
)
- @pytest.mark.parametrize("grid_type", ["rectilinear", "curvilinear", "raster_affine"])
+ @pytest.mark.parametrize(
+ "grid_type", ["rectilinear", "curvilinear", "curvilinear_hycom", "raster_affine"]
+ )
@given(data=st.data())
@settings(deadline=None)
def test_max_zoom_matches_dataset_resolution(self, tms_id, grid_type, data):
@@ -853,7 +895,9 @@ def test_max_zoom_matches_dataset_resolution(self, tms_id, grid_type, data):
@pytest.mark.parametrize(
"tms_id", ["WebMercatorQuad", "WGS1984Quad", "WorldCRS84Quad"]
)
- @pytest.mark.parametrize("grid_type", ["rectilinear", "curvilinear", "raster_affine"])
+ @pytest.mark.parametrize(
+ "grid_type", ["rectilinear", "curvilinear", "curvilinear_hycom", "raster_affine"]
+ )
@given(data=st.data())
@settings(deadline=None)
def test_min_zoom_matches_renderable_size_limit(self, tms_id, grid_type, data):
diff --git a/tests/test_pipeline.py b/tests/test_pipeline.py
index b582669..5930473 100644
--- a/tests/test_pipeline.py
+++ b/tests/test_pipeline.py
@@ -36,9 +36,11 @@
FORECAST,
GLOBAL_6KM,
GLOBAL_6KM_360,
+ GLOBAL_HYCOM,
GLOBAL_NANS,
HRRR,
PARA,
+ REGIONAL_HYCOM,
)
from xpublish_tiles.testing.lib import (
assert_render_matches_snapshot,
@@ -389,6 +391,10 @@ def test_apply_query_selectors():
result = apply_query(curvilinear_ds, variables=["foo"], selectors={})
assert_equal(result["foo"].da, curvilinear_ds.foo.sel(s_rho=0, method="nearest"))
+ curvilinear_hycom_ds = REGIONAL_HYCOM.create()
+ result = apply_query(curvilinear_hycom_ds, variables=["foo"], selectors={})
+ assert_equal(result["foo"].da, curvilinear_hycom_ds.foo)
+
hrrr = HRRR.create()
hrrr.time.attrs = {"standard_name": "time"}
result = apply_query(hrrr, variables=["foo"], selectors={"time": "2018-01-01"})
@@ -681,3 +687,27 @@ async def test_hrrr_multiple_vs_hrrr_rendering(tile, tms, pytestconfig):
f"HRRR_MULTIPLE should render identically to HRRR for tile {tile} "
f"but images differ"
)
+
+
+@pytest.mark.parametrize(
+ "tile",
+ [
+ morecantile.Tile(x=0, y=0, z=0),
+ morecantile.Tile(x=1, y=1, z=1),
+ morecantile.Tile(x=2, y=3, z=2),
+ morecantile.Tile(x=0, y=2, z=2),
+ ],
+)
+async def test_hycom_like_grid(tile, png_snapshot, pytestconfig):
+ ds = GLOBAL_HYCOM.create()
+ query = create_query_params(tms=WEBMERC_TMS, tile=tile)
+ result = await pipeline(ds, query)
+ if pytestconfig.getoption("--visualize"):
+ visualize_tile(result, tile)
+ assert_render_matches_snapshot(
+ result,
+ png_snapshot,
+ tile=tile,
+ tms=WEBMERC_TMS,
+ dataset_bbox=ds.attrs.get("bbox"),
+ )
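
For orientation, the geographic extent each snapshotted tile covers can be listed with morecantile (a sketch; WEBMERC_TMS in the test is assumed to be morecantile's WebMercatorQuad):

import morecantile

tms = morecantile.tms.get("WebMercatorQuad")
for tile in [
    morecantile.Tile(x=0, y=0, z=0),
    morecantile.Tile(x=1, y=1, z=1),
    morecantile.Tile(x=2, y=3, z=2),
    morecantile.Tile(x=0, y=2, z=2),
]:
    # bounds() gives the tile's lon/lat extent, which shows which part of the
    # HYCOM-like grid each snapshot exercises (z=0 is the whole globe).
    print(tile, tms.bounds(tile))
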
diff --git a/uv.lock b/uv.lock
index e9eaba4..58185a0 100644
--- a/uv.lock
+++ b/uv.lock
@@ -400,6 +400,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/7c/fc/6a8cb64e5f0324877d503c854da15d76c1e50eb722e320b15345c4d0c6de/cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a", size = 182009, upload-time = "2024-09-04T20:44:45.309Z" },
]
+[[package]]
+name = "cfgv"
+version = "3.5.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/4e/b5/721b8799b04bf9afe054a3899c6cf4e880fcf8563cc71c15610242490a0c/cfgv-3.5.0.tar.gz", hash = "sha256:d5b1034354820651caa73ede66a6294d6e95c1b00acc5e9b098e917404669132", size = 7334, upload-time = "2025-11-19T20:55:51.612Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/db/3c/33bac158f8ab7f89b2e59426d5fe2e4f63f7ed25df84c036890172b412b5/cfgv-3.5.0-py2.py3-none-any.whl", hash = "sha256:a8dc6b26ad22ff227d2634a65cb388215ce6cc96bbcc5cfde7641ae87e8dacc0", size = 7445, upload-time = "2025-11-19T20:55:50.744Z" },
+]
+
[[package]]
name = "cftime"
version = "1.6.4.post1"
@@ -1025,6 +1034,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/6e/c6/ac0b6c1e2d138f1002bcf799d330bd6d85084fece321e662a14223794041/Deprecated-1.2.18-py2.py3-none-any.whl", hash = "sha256:bd5011788200372a32418f888e326a09ff80d0214bd961147cfed01b5c018eec", size = 9998, upload-time = "2025-01-27T10:46:09.186Z" },
]
+[[package]]
+name = "distlib"
+version = "0.4.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/96/8e/709914eb2b5749865801041647dc7f4e6d00b549cfe88b65ca192995f07c/distlib-0.4.0.tar.gz", hash = "sha256:feec40075be03a04501a973d81f633735b4b69f98b05450592310c0f401a4e0d", size = 614605, upload-time = "2025-07-17T16:52:00.465Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/33/6b/e0547afaf41bf2c42e52430072fa5658766e3d65bd4b03a563d1b6336f57/distlib-0.4.0-py2.py3-none-any.whl", hash = "sha256:9659f7d87e46584a30b5780e43ac7a2143098441670ff0a49d5f9034c54a6c16", size = 469047, upload-time = "2025-07-17T16:51:58.613Z" },
+]
+
[[package]]
name = "distributed"
version = "2025.7.0"
@@ -1695,6 +1713,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/ab/97/cf875a19e379a322438620a359ebd5fd3b5916d26b88712c02c811389af0/icechunk-1.1.4-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:e702fd1ee2b7d39966da2bffafb1fcba7c0585a8992e34ee1fa6952a3eec6612", size = 16930358, upload-time = "2025-08-21T16:08:35.035Z" },
]
+[[package]]
+name = "identify"
+version = "2.6.15"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/ff/e7/685de97986c916a6d93b3876139e00eef26ad5bbbd61925d670ae8013449/identify-2.6.15.tar.gz", hash = "sha256:e4f4864b96c6557ef2a1e1c951771838f4edc9df3a72ec7118b338801b11c7bf", size = 99311, upload-time = "2025-10-02T17:43:40.631Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/0f/1c/e5fd8f973d4f375adb21565739498e2e9a1e54c858a97b9a8ccfdc81da9b/identify-2.6.15-py2.py3-none-any.whl", hash = "sha256:1181ef7608e00704db228516541eb83a88a9f94433a8c80bb9b5bd54b1d81757", size = 99183, upload-time = "2025-10-02T17:43:39.137Z" },
+]
+
[[package]]
name = "idna"
version = "3.10"
@@ -2389,6 +2416,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/eb/8d/776adee7bbf76365fdd7f2552710282c79a4ead5d2a46408c9043a2b70ba/networkx-3.5-py3-none-any.whl", hash = "sha256:0030d386a9a06dee3565298b4a734b68589749a544acbb6c412dc9e2489ec6ec", size = 2034406, upload-time = "2025-05-29T11:35:04.961Z" },
]
+[[package]]
+name = "nodeenv"
+version = "1.10.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/24/bf/d1bda4f6168e0b2e9e5958945e01910052158313224ada5ce1fb2e1113b8/nodeenv-1.10.0.tar.gz", hash = "sha256:996c191ad80897d076bdfba80a41994c2b47c68e224c542b48feba42ba00f8bb", size = 55611, upload-time = "2025-12-20T14:08:54.006Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/88/b2/d0896bdcdc8d28a7fc5717c305f1a861c26e18c05047949fb371034d98bd/nodeenv-1.10.0-py2.py3-none-any.whl", hash = "sha256:5bb13e3eed2923615535339b3c620e76779af4cb4c6a90deccc9e36b274d3827", size = 23438, upload-time = "2025-12-20T14:08:52.782Z" },
+]
+
[[package]]
name = "numba"
version = "0.61.2"
@@ -3031,6 +3067,22 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/a8/87/77cc11c7a9ea9fd05503def69e3d18605852cd0d4b0d3b8f15bbeb3ef1d1/pooch-1.8.2-py3-none-any.whl", hash = "sha256:3529a57096f7198778a5ceefd5ac3ef0e4d06a6ddaf9fc2d609b806f25302c47", size = 64574, upload-time = "2024-06-06T16:53:44.343Z" },
]
+[[package]]
+name = "pre-commit"
+version = "4.5.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "cfgv" },
+ { name = "identify" },
+ { name = "nodeenv" },
+ { name = "pyyaml" },
+ { name = "virtualenv" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/40/f1/6d86a29246dfd2e9b6237f0b5823717f60cad94d47ddc26afa916d21f525/pre_commit-4.5.1.tar.gz", hash = "sha256:eb545fcff725875197837263e977ea257a402056661f09dae08e4b149b030a61", size = 198232, upload-time = "2025-12-16T21:14:33.552Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/5d/19/fd3ef348460c80af7bb4669ea7926651d1f95c23ff2df18b9d24bab4f3fa/pre_commit-4.5.1-py2.py3-none-any.whl", hash = "sha256:3b3afd891e97337708c1674210f8eba659b52a38ea5f822ff142d10786221f77", size = 226437, upload-time = "2025-12-16T21:14:32.409Z" },
+]
+
[[package]]
name = "prometheus-client"
version = "0.22.1"
@@ -4556,6 +4608,20 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/d2/e2/dc81b1bd1dcfe91735810265e9d26bc8ec5da45b4c0f6237e286819194c3/uvicorn-0.35.0-py3-none-any.whl", hash = "sha256:197535216b25ff9b785e29a0b79199f55222193d47f820816e7da751e9bc8d4a", size = 66406, upload-time = "2025-06-28T16:15:44.816Z" },
]
+[[package]]
+name = "virtualenv"
+version = "20.35.4"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "distlib" },
+ { name = "filelock" },
+ { name = "platformdirs" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/20/28/e6f1a6f655d620846bd9df527390ecc26b3805a0c5989048c210e22c5ca9/virtualenv-20.35.4.tar.gz", hash = "sha256:643d3914d73d3eeb0c552cbb12d7e82adf0e504dbf86a3182f8771a153a1971c", size = 6028799, upload-time = "2025-10-29T06:57:40.511Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/79/0c/c05523fa3181fdf0c9c52a6ba91a23fbf3246cc095f26f6516f9c60e6771/virtualenv-20.35.4-py3-none-any.whl", hash = "sha256:c21c9cede36c9753eeade68ba7d523529f228a403463376cf821eaae2b650f1b", size = 6005095, upload-time = "2025-10-29T06:57:37.598Z" },
+]
+
[[package]]
name = "wcwidth"
version = "0.2.13"
@@ -4745,6 +4811,7 @@ dev = [
{ name = "netcdf4" },
{ name = "pdbpp" },
{ name = "pooch" },
+ { name = "pre-commit" },
{ name = "ruff" },
{ name = "ty" },
{ name = "xpublish-tiles", extra = ["testing"] },
@@ -4799,6 +4866,7 @@ dev = [
{ name = "netcdf4", specifier = ">=1.7.2" },
{ name = "pdbpp", specifier = ">=0.11.7" },
{ name = "pooch", specifier = ">=1.8.2" },
+ { name = "pre-commit", specifier = ">=4.5.1" },
{ name = "ruff", specifier = ">=0.12.4" },
{ name = "ty", specifier = ">=0.0.1a15" },
{ name = "xpublish-tiles", extras = ["testing"] },