diff --git a/src/xpublish_tiles/xpublish/tiles/plugin.py b/src/xpublish_tiles/xpublish/tiles/plugin.py index 72445b8..258807f 100644 --- a/src/xpublish_tiles/xpublish/tiles/plugin.py +++ b/src/xpublish_tiles/xpublish/tiles/plugin.py @@ -4,7 +4,7 @@ import io import json from enum import Enum -from typing import Annotated +from typing import Annotated, Iterable from urllib.parse import quote import morecantile @@ -13,6 +13,7 @@ from xpublish import Dependencies, Plugin, hookimpl from xarray import Dataset +from xarray.core.datatree import DataTree from xpublish_tiles.grids import guess_grid_system from xpublish_tiles.lib import ( IndexingError, @@ -56,6 +57,288 @@ TilesetsList, ) +DATASET_ID_ATTR_KEY = "_xpublish_id" + +_TMS_IDS: Iterable[str] = get_all_tile_matrix_set_ids() + + +def _resolve_level_dataset(tree: DataTree, level: int | str, datatree_id: str): + """Pick the dataset stored under a level named by the zoom (string int).""" + level_key = str(level) + try: + level_node = tree[level_key] + except KeyError as exc: + raise HTTPException( + status_code=404, detail=f"Precomputed level '{level_key}' not found" + ) from exc + + ds = level_node.ds + if ds is None: + raise HTTPException( + status_code=404, detail=f"Precomputed level '{level_key}' has no dataset" + ) + + if ds.attrs.get(DATASET_ID_ATTR_KEY) is None: + ds = ds.assign_attrs({DATASET_ID_ATTR_KEY: f"{datatree_id}:{level_key}"}) + + return ds + + +def _parse_tile_query(request: Request) -> TileQuery: + """Parse TileQuery from request query parameters (support repeated params).""" + qp = request.query_params + params: dict[str, object] = {} + for key in qp.keys(): + values = qp.getlist(key) + if key == "variables": + params[key] = values + else: + params[key] = values[0] if values else None + + # Provide defaults so field validators run (style/f/render_errors) + params.setdefault("style", "raster/default") + params.setdefault("f", "image/png") + params.setdefault("render_errors", False) + + return TileQuery.model_validate(params) + + +def _extract_selectors(request: Request, dataset: Dataset): + """Extract dimension selectors from query parameters, excluding tile filter params.""" + selectors = {} + for param_name, param_value in request.query_params.items(): + if param_name in TILES_FILTERED_QUERY_PARAMS: + continue + if param_name in dataset.dims: + selectors[param_name] = param_value + return selectors + + +async def _build_tilesets_list(dataset: Dataset) -> TilesetsList: + """Create a TilesetsList for a dataset (shared by dataset & datatree flows).""" + dataset_attrs = dataset.attrs + title = dataset_attrs.get("title", "Dataset") + description = dataset_attrs.get("description", "") + keywords = dataset_attrs.get("keywords", "") + if isinstance(keywords, str): + keywords = [k.strip() for k in keywords.split(",") if k.strip()] + elif not isinstance(keywords, list): + keywords = [] + + styles = [] + for renderer_cls in RenderRegistry.all().values(): + default_variant = renderer_cls.default_variant() + default_style_info = renderer_cls.describe_style("default") + default_style_info["title"] = ( + f"{renderer_cls.style_id().title()} - Default ({default_variant.title()})" + ) + default_style_info["description"] = ( + f"Default {renderer_cls.style_id()} rendering (alias for {default_variant})" + ) + styles.append( + Style( + id=default_style_info["id"], + title=default_style_info["title"], + description=default_style_info["description"], + ) + ) + for variant in renderer_cls.supported_variants(): + style_info = 
renderer_cls.describe_style(variant) + styles.append( + Style( + id=style_info["id"], + title=style_info["title"], + description=style_info["description"], + ) + ) + + layer_extents = {} + for var_name in dataset.data_vars.keys(): + if dataset[var_name].ndim == 0: + continue + extents = await extract_dataset_extents(dataset, var_name) + layer_extents[var_name] = extents + + tilesets = [] + for tms_id in _TMS_IDS: + tileset = await create_tileset_for_tms( + dataset, + tms_id, + layer_extents=layer_extents, + title=title, + description=description, + keywords=keywords, + dataset_attrs=dataset_attrs, + styles=styles, + ) + if tileset: + tilesets.append(tileset) + + return TilesetsList(tilesets=tilesets) + + +async def _create_tilejson( + request: Request, + dataset: Dataset, + tileMatrixSetId: str, + query: TileQuery, + selectors: dict[str, str], +): + """Shared TileJSON construction.""" + if tileMatrixSetId not in TILE_MATRIX_SET_SUMMARIES: + raise HTTPException(status_code=404, detail="Tile matrix set not found") + + if not query.variables or len(query.variables) == 0: + raise HTTPException(status_code=422, detail="No variables specified") + + try: + bounds = await extract_variable_bounding_box( + dataset, query.variables[0], "EPSG:4326" + ) + except VariableNotFoundError as e: + logger.error("VariableNotFoundError", str(e)) + raise HTTPException( + status_code=422, + detail=f"Invalid variable name(s): {query.variables!r}.", + ) from None + + base_url = str(request.base_url).rstrip("/") + root_path = request.scope.get("root_path", "") + tiles_path = request.url.path.replace(root_path, "", 1).rsplit("/", 1)[0] + + style = query.style[0] if query.style else "raster" + variant = query.style[1] if query.style else "default" + + url_template = f"{base_url}{tiles_path}/{{z}}/{{y}}/{{x}}?variables={','.join(query.variables)}&style={style}/{variant}&width={query.width}&height={query.height}&f={query.f}&render_errors={str(query.render_errors).lower()}" + if query.colorscalerange: + url_template = f"{url_template}&colorscalerange={query.colorscalerange[0]:g},{query.colorscalerange[1]:g}" + + if query.colormap: + url_template = f"{url_template}&colormap={quote(json.dumps(query.colormap))}" + + if selectors: + selector_qs = "&".join(f"{k}={v}" for k, v in selectors.items()) + url_template = f"{url_template}&{selector_qs}" + + bounds_list = None + if bounds is not None: + bounds_list = normalize_tilejson_bounds( + [ + bounds.lowerLeft[0], + bounds.lowerLeft[1], + bounds.upperRight[0], + bounds.upperRight[1], + ] + ) + + tms = morecantile.tms.get(tileMatrixSetId) + var_name = query.variables[0] + grid = await async_run(guess_grid_system, dataset, var_name) + da = dataset.cf[var_name] + + bound_logger = get_context_logger() + bound_logger = bound_logger.bind(tms=tms.id) + set_context_logger(bound_logger) + + minzoom = await async_run(get_min_zoom, grid, tms, da) + maxzoom = tms.maxzoom + + return TileJSON( + tilejson="3.0.0", + tiles=[url_template], + name=dataset.attrs.get("title", "Dataset"), + description=dataset.attrs.get("description"), + version=dataset.attrs.get("version"), + scheme="xyz", + attribution=dataset.attrs.get("attribution"), + bounds=bounds_list, + minzoom=minzoom, + maxzoom=maxzoom, + ) + + +async def _render_tile( + request: Request, + dataset: Dataset, + tileMatrixSetId: str, + tileMatrix: int, + tileRow: int, + tileCol: int, + query: TileQuery, +): + """Shared tile rendering logic.""" + try: + bbox, crs = extract_tile_bbox_and_crs( + tileMatrixSetId, tileMatrix, tileRow, tileCol + ) 
+ except ValueError as e: + raise HTTPException(status_code=404, detail=str(e)) from e + + selectors = _extract_selectors(request, dataset) + + style = query.style[0] if query.style else "raster" + variant = query.style[1] if query.style else "default" + + render_params = QueryParams( + variables=query.variables, + style=style, + colorscalerange=query.colorscalerange, + variant=variant, + crs=crs, + bbox=bbox, + width=query.width, + height=query.height, + format=query.f, + selectors=selectors, + colormap=query.colormap, + ) + + try: + buffer = await pipeline(dataset, render_params) + status_code = 200 + detail = "OK" + except TileTooBigError: + status_code = 413 + detail = f"Tile {tileMatrixSetId}/{tileMatrix}/{tileRow}/{tileCol} request too big. Please choose a higher zoom level." + bound_logger = get_context_logger() + bound_logger.error("TileTooBigError", message=detail) + except VariableNotFoundError as e: + bound_logger = get_context_logger() + bound_logger.error("VariableNotFoundError", error=str(e)) + status_code = 422 + detail = f"Invalid variable name(s): {query.variables!r}." + except IndexingError as e: + bound_logger = get_context_logger() + bound_logger.error("IndexingError", error=str(e)) + status_code = 422 + detail = f"Invalid indexer: {selectors!r}." + except MissingParameterError as e: + bound_logger = get_context_logger() + bound_logger.error("MissingParameterError", error=str(e)) + status_code = 422 + detail = f"Missing parameter: {e!s}." + except Exception as e: # pragma: no cover - passthrough errors + status_code = 500 + bound_logger = get_context_logger() + bound_logger.error("Exception", error=str(e)) + detail = str(e) + + if status_code != 200: + if not query.render_errors: + raise HTTPException(status_code=status_code, detail=detail) + else: + renderer = render_params.get_renderer() + buffer = io.BytesIO() + renderer.render_error( + buffer=buffer, + width=query.width, + height=query.height, + message=detail, + format=query.f, + ) + + return Response(buffer.getbuffer(), media_type="image/png") + class TilesPlugin(Plugin): name: str = "tiles" @@ -66,6 +349,9 @@ class TilesPlugin(Plugin): dataset_router_prefix: str = "/tiles" dataset_router_tags: list[str | Enum] = ["tiles"] + datatree_router_prefix: str = "/tiles" + datatree_router_tags: list[str | Enum] = ["tiles"] + @hookimpl def app_router(self, deps: Dependencies): """Global tiles endpoints""" @@ -122,81 +408,7 @@ async def get_dataset_tiles_list( dataset: Dataset = Depends(deps.dataset), ): """List of available tilesets for this dataset""" - # Get dataset metadata - dataset_attrs = dataset.attrs - title = dataset_attrs.get("title", "Dataset") - description = dataset_attrs.get("description", "") - keywords = dataset_attrs.get("keywords", "") - if isinstance(keywords, str): - keywords = [k.strip() for k in keywords.split(",") if k.strip()] - elif not isinstance(keywords, list): - keywords = [] - - # Get available styles from registered renderers - logger.info(f"Getting available styles for dataset '{title}'") - - styles = [] - for renderer_cls in RenderRegistry.all().values(): - # Add default variant alias - default_variant = renderer_cls.default_variant() - default_style_info = renderer_cls.describe_style("default") - default_style_info["title"] = ( - f"{renderer_cls.style_id().title()} - Default ({default_variant.title()})" - ) - default_style_info["description"] = ( - f"Default {renderer_cls.style_id()} rendering (alias for {default_variant})" - ) - styles.append( - Style( - id=default_style_info["id"], - 
title=default_style_info["title"], - description=default_style_info["description"], - ) - ) - - # Add all actual variants - for variant in renderer_cls.supported_variants(): - style_info = renderer_cls.describe_style(variant) - styles.append( - Style( - id=style_info["id"], - title=style_info["title"], - description=style_info["description"], - ) - ) - - logger.info("loading extents for dataset vars") - - layer_extents = {} - for var_name in dataset.data_vars.keys(): - # Skip scalar variables - if dataset[var_name].ndim == 0: - continue - extents = await extract_dataset_extents(dataset, var_name) - layer_extents[var_name] = extents - - # Create one tileset entry per supported tile matrix set - supported_tms = get_all_tile_matrix_set_ids() - - # Execute concurrently in the event loop - tileset_results = await asyncio.gather( - *[ - create_tileset_for_tms( - dataset, - tms_id, - layer_extents, - title, - description, - keywords, - dataset_attrs, - styles, - ) - for tms_id in supported_tms - ] - ) - tilesets = [ts for ts in tileset_results if ts is not None] - - return TilesetsList(tilesets=tilesets) + return await _build_tilesets_list(dataset) @router.get( "/{tileMatrixSetId}", @@ -244,109 +456,8 @@ async def get_dataset_tilejson( dataset: Dataset = Depends(deps.dataset), ): """Get TileJSON specification for this dataset and tile matrix set""" - # Validate that the tile matrix set exists - if tileMatrixSetId not in TILE_MATRIX_SET_SUMMARIES: - raise HTTPException(status_code=404, detail="Tile matrix set not found") - - # Extract dimension selectors from query parameters - selectors = {} - for param_name, param_value in request.query_params.items(): - # Skip the standard tile query parameters - if param_name not in TILES_FILTERED_QUERY_PARAMS: - # Check if this parameter corresponds to a dataset dimension - if param_name in dataset.dims: - selectors[param_name] = param_value - - if not query.variables or len(query.variables) == 0: - raise HTTPException(status_code=422, detail="No variables specified") - - try: - bounds = await extract_variable_bounding_box( - dataset, query.variables[0], "EPSG:4326" - ) - except VariableNotFoundError as e: - logger.error("VariableNotFoundError", str(e)) - raise HTTPException( - status_code=422, - detail=f"Invalid variable name(s): {query.variables!r}.", - ) from None - - logger.info(f"base_url: {request.base_url}") - logger.info(f"url: {request.url.path}") - logger.info(f"root_path: {request.scope.get('root_path')}") - - # Build tile URL template relative to this endpoint - base_url = str(request.base_url).rstrip("/") - root_path = request.scope.get("root_path", "") - - # dataset path prefix already includes /datasets/{id} by xpublish; request.url.path points to /datasets/{id}/tiles/{tms}/tilejson.json - # Construct sibling tiles path replacing tailing segment - tiles_path = request.url.path.replace(root_path, "", 1).rsplit("/", 1)[ - 0 - ] # drop 'tilejson.json' - - style = query.style[0] if query.style else "raster" - variant = query.style[1] if query.style else "default" - - # XYZ template - url_template = f"{base_url}{tiles_path}/{{z}}/{{y}}/{{x}}?variables={','.join(query.variables)}&style={style}/{variant}&width={query.width}&height={query.height}&f={query.f}&render_errors={str(query.render_errors).lower()}" - # Append optional color scale range - if query.colorscalerange: - url_template = f"{url_template}&colorscalerange={query.colorscalerange[0]:g},{query.colorscalerange[1]:g}" - - if query.colormap: - url_template = ( - 
f"{url_template}&colormap={quote(json.dumps(query.colormap))}" - ) - - # Append selectors - if selectors: - selector_qs = "&".join(f"{k}={v}" for k, v in selectors.items()) - url_template = f"{url_template}&{selector_qs}" - - # Compute bounds list if available - bounds_list = None - if bounds is not None: - bounds_list = normalize_tilejson_bounds( - [ - bounds.lowerLeft[0], - bounds.lowerLeft[1], - bounds.upperRight[0], - bounds.upperRight[1], - ] - ) - - # Determine min/max zoom from dataset characteristics - # Get the original morecantile TMS for minzoom/maxzoom properties - tms = morecantile.tms.get(tileMatrixSetId) - - # Calculate optimal zoom levels based on grid and data characteristics - # Get the first variable's grid system - var_name = query.variables[0] - grid = await async_run(guess_grid_system, dataset, var_name) - da = dataset.cf[var_name] - - bound_logger = get_context_logger() - bound_logger = bound_logger.bind(tms=tms.id) - set_context_logger(bound_logger) - - # Calculate min/max zoom based on data characteristics - minzoom = await async_run(get_min_zoom, grid, tms, da) - maxzoom = tms.maxzoom - - # Compose TileJSON - return TileJSON( - tilejson="3.0.0", - tiles=[url_template], - name=dataset.attrs.get("title", "Dataset"), - description=dataset.attrs.get("description"), - version=dataset.attrs.get("version"), - scheme="xyz", - attribution=dataset.attrs.get("attribution"), - bounds=bounds_list, - minzoom=minzoom, - maxzoom=maxzoom, - ) + selectors = _extract_selectors(request, dataset) + return await _create_tilejson(request, dataset, tileMatrixSetId, query, selectors) @router.get("/{tileMatrixSetId}/{tileMatrix}/{tileRow}/{tileCol}") @with_accumulated_logs( @@ -379,84 +490,137 @@ async def get_dataset_tile( dataset: Dataset = Depends(deps.dataset), ): """Get individual tile from this dataset""" - try: - bbox, crs = extract_tile_bbox_and_crs( - tileMatrixSetId, tileMatrix, tileRow, tileCol + return await _render_tile( + request, + dataset, + tileMatrixSetId, + tileMatrix, + tileRow, + tileCol, + query, + ) + + return router + + @hookimpl + def datatree_router(self, deps: Dependencies): + """Tiles endpoints for DataTree-based multiscale pyramids.""" + router = APIRouter( + prefix=self.datatree_router_prefix, tags=self.datatree_router_tags + ) + + @router.get("/", response_model=TilesetsList, response_model_exclude_none=True) + @with_accumulated_logs( + log_message_fn=lambda datatree_id, datatree: f"tiles_list_multiscale {datatree_id}", + context_fn=lambda datatree_id, datatree: { + "endpoint": "tiles_list_multiscale", + "datatree_id": datatree_id, + }, + ) + async def get_datatree_tiles_list( + datatree_id: str, + datatree: DataTree = Depends(deps.datatree), + ): + # Aggregate tilesets across all registered TMS for level 0 (metadata only). 
+ if "0" not in datatree: + raise HTTPException( + status_code=404, detail="Precomputed level '0' not found" ) + + dataset = _resolve_level_dataset(datatree, 0, datatree_id) + return await _build_tilesets_list(dataset) + + @router.get( + "/{tileMatrixSetId}", + response_model=TileSetMetadata, + response_model_exclude_none=True, + ) + async def get_datatree_tileset_metadata( + datatree_id: str, + tileMatrixSetId: str, + datatree: DataTree = Depends(deps.datatree), + ): + dataset = _resolve_level_dataset(datatree, 0, datatree_id) + try: + return await async_run(create_tileset_metadata, dataset, tileMatrixSetId) except ValueError as e: raise HTTPException(status_code=404, detail=str(e)) from e - # Extract dimension selectors from query parameters - selectors = {} - for param_name, param_value in request.query_params.items(): - # Skip the standard tile query parameters - if param_name not in TILES_FILTERED_QUERY_PARAMS: - # Check if this parameter corresponds to a dataset dimension - if param_name in dataset.dims: - selectors[param_name] = param_value - - style = query.style[0] if query.style else "raster" - variant = query.style[1] if query.style else "default" - - render_params = QueryParams( - variables=query.variables, - style=style, - colorscalerange=query.colorscalerange, - variant=variant, - crs=crs, - bbox=bbox, - width=query.width, - height=query.height, - format=query.f, - selectors=selectors, - colormap=query.colormap, + @router.get( + "/{tileMatrixSetId}/tilejson.json", + response_model=TileJSON, + response_model_exclude_none=True, + ) + @with_accumulated_logs( + log_message_fn=lambda request, + datatree_id, + tileMatrixSetId, + query, + tileMatrix, + datatree: f"tilejson_multiscale {tileMatrixSetId} {query.variables} {datatree_id}", + context_fn=lambda request, datatree_id, tileMatrixSetId, query, tileMatrix, datatree: { + "tileMatrixSetId": tileMatrixSetId, + "variables": query.variables, + "tileMatrix": tileMatrix, + "datatree_id": datatree_id, + }, + ) + async def get_datatree_tilejson( + request: Request, + datatree_id: str, + tileMatrixSetId: str, + query: Annotated[TileQuery, Depends(_parse_tile_query)], + tileMatrix: Annotated[int, Query(description="Requested zoom level")], + datatree: DataTree = Depends(deps.datatree), + ): + dataset = _resolve_level_dataset(datatree, tileMatrix, datatree_id) + selectors = _extract_selectors(request, dataset) + return await _create_tilejson( + request, dataset, tileMatrixSetId, query, selectors ) - try: - buffer = await pipeline(dataset, render_params) - status_code = 200 - detail = "OK" - except TileTooBigError: - status_code = 413 - detail = f"Tile {tileMatrixSetId}/{tileMatrix}/{tileRow}/{tileCol} request too big. Please choose a higher zoom level." - bound_logger = get_context_logger() - bound_logger.error("TileTooBigError", message=detail) - except VariableNotFoundError as e: - bound_logger = get_context_logger() - bound_logger.error("VariableNotFoundError", error=str(e)) - status_code = 422 - detail = f"Invalid variable name(s): {query.variables!r}." - except IndexingError as e: - bound_logger = get_context_logger() - bound_logger.error("IndexingError", error=str(e)) - status_code = 422 - detail = f"Invalid indexer: {selectors!r}." - except MissingParameterError as e: - bound_logger = get_context_logger() - bound_logger.error("MissingParameterError", error=str(e)) - status_code = 422 - detail = f"Missing parameter: {e!s}." 
- except Exception as e: - status_code = 500 - bound_logger = get_context_logger() - bound_logger.error("Exception", error=str(e)) - detail = str(e) - - if status_code != 200: - if not query.render_errors: - raise HTTPException(status_code=status_code, detail=detail) - else: - # Use renderer's render_error method for all error types - renderer = render_params.get_renderer() - buffer = io.BytesIO() - renderer.render_error( - buffer=buffer, - width=query.width, - height=query.height, - message=detail, - format=query.f, - ) - - return Response(buffer.getbuffer(), media_type="image/png") + @router.get("/{tileMatrixSetId}/{tileMatrix}/{tileRow}/{tileCol}") + @with_accumulated_logs( + log_message_fn=lambda request, + datatree_id, + tileMatrixSetId, + tileMatrix, + tileRow, + tileCol, + query, + datatree: f"tile_multiscale {tileMatrixSetId}/{tileMatrix}/{tileRow}/{tileCol} {query.variables} {datatree_id}", + context_fn=lambda request, + datatree_id, + tileMatrixSetId, + tileMatrix, + tileRow, + tileCol, + query, + datatree: { + "tile": f"{tileMatrixSetId}/{tileMatrix}/{tileRow}/{tileCol}", + "variables": query.variables, + "datatree_id": datatree_id, + }, + ) + async def get_datatree_tile( + request: Request, + datatree_id: str, + tileMatrixSetId: str, + tileMatrix: int, + tileRow: int, + tileCol: int, + query: Annotated[TileQuery, Depends(_parse_tile_query)], + datatree: DataTree = Depends(deps.datatree), + ): + dataset = _resolve_level_dataset(datatree, tileMatrix, datatree_id) + return await _render_tile( + request, + dataset, + tileMatrixSetId, + tileMatrix, + tileRow, + tileCol, + query, + ) return router diff --git a/tests/test_xpublish/test_tiles/test_precomputed_multiscale_tiles_plugin.py b/tests/test_xpublish/test_tiles/test_precomputed_multiscale_tiles_plugin.py new file mode 100644 index 0000000..25d475e --- /dev/null +++ b/tests/test_xpublish/test_tiles/test_precomputed_multiscale_tiles_plugin.py @@ -0,0 +1,121 @@ +import numpy as np +import pytest +import xarray as xr +from fastapi.testclient import TestClient + +from xpublish import Rest +from xpublish_tiles.xpublish.tiles import TilesPlugin + + +def _make_level_dataset(level: int, lat_size: int, lon_size: int) -> xr.Dataset: + lat = np.linspace(-85, 85, lat_size) + lon = np.linspace(-180, 180, lon_size, endpoint=False) + data = np.full((lat_size, lon_size), fill_value=float(level), dtype=np.float32) + + ds = xr.Dataset( + { + "foo": ( + ("lat", "lon"), + data, + {"valid_min": float(data.min()), "valid_max": float(data.max())}, + ) + }, + coords={ + "lat": ( + "lat", + lat, + { + "axis": "Y", + "standard_name": "latitude", + "units": "degrees_north", + }, + ), + "lon": ( + "lon", + lon, + { + "axis": "X", + "standard_name": "longitude", + "units": "degrees_east", + }, + ), + }, + attrs={"title": f"Precomputed level {level}"}, + ) + + return ds + + +@pytest.fixture(scope="session") +def multiscale_datatree(): + levels = { + str(level): _make_level_dataset(level, 8 * (2**level), 16 * (2**level)) + for level in (0, 1, 2) + } + return xr.DataTree.from_dict(levels, name="pyramid") + + +@pytest.fixture(scope="session") +def multiscale_client(multiscale_datatree): + rest = Rest( + datatrees={"pyramid": multiscale_datatree}, + plugins={"precomputed_tiles": TilesPlugin()}, + ) + return TestClient(rest.app) + + +@pytest.mark.parametrize("zoom", [0, 1, 2]) +def test_tilejson_uses_requested_level_metadata(multiscale_client, zoom): + response = multiscale_client.get( + "/datatrees/pyramid/tiles/WebMercatorQuad/tilejson.json" + 
f"?variables=foo&width=256&height=256&tileMatrix={zoom}" + ) + + assert response.status_code == 200 + tilejson = response.json() + + assert tilejson["name"] == f"Precomputed level {zoom}" + assert any("/datatrees/pyramid/tiles/WebMercatorQuad" in url for url in tilejson["tiles"]) + + +@pytest.mark.parametrize("zoom", [0, 1, 2]) +def test_tiles_resolve_precomputed_levels(multiscale_client, zoom): + response = multiscale_client.get( + f"/datatrees/pyramid/tiles/WebMercatorQuad/{zoom}/0/0" + "?variables=foo&width=256&height=256" + ) + + assert response.status_code == 200 + assert response.headers["content-type"] == "image/png" + + +def test_missing_level_returns_404(multiscale_client): + tilejson_response = multiscale_client.get( + "/datatrees/pyramid/tiles/WebMercatorQuad/tilejson.json" + "?variables=foo&width=256&height=256&tileMatrix=5" + ) + assert tilejson_response.status_code == 404 + + tile_response = multiscale_client.get( + "/datatrees/pyramid/tiles/WebMercatorQuad/5/0/0" + "?variables=foo&width=256&height=256" + ) + assert tile_response.status_code == 404 + + +def test_dataset_tiles_plugin_still_works_with_precomputed_plugin( + multiscale_datatree, air_dataset +): + rest = Rest( + {"air": air_dataset}, + datatrees={"pyramid": multiscale_datatree}, + plugins={"tiles": TilesPlugin()}, + ) + client = TestClient(rest.app) + + response = client.get( + "/datasets/air/tiles/WebMercatorQuad/tilejson.json" + "?variables=air&width=256&height=256" + ) + + assert response.status_code == 200