diff --git a/src/backend/app/projects/project_routes.py b/src/backend/app/projects/project_routes.py
index 3745d93e..69bd9a7f 100644
--- a/src/backend/app/projects/project_routes.py
+++ b/src/backend/app/projects/project_routes.py
@@ -24,9 +24,11 @@
Response,
UploadFile,
)
+from fastapi.concurrency import run_in_threadpool
from fastapi.responses import StreamingResponse
from geojson_pydantic import FeatureCollection
from loguru import logger as log
+from minio.error import S3Error
from psycopg import Connection
from psycopg.rows import dict_row
from stream_zip import NO_COMPRESSION_64, stream_zip
@@ -1913,6 +1915,185 @@ async def head_odm_assets(
)
+# ---------------------------------------------------------------------------
+# 3D Tiles proxy
+#
+# 3D Tiles consist of a root ``tileset.json`` plus a tree of individual tile
+# binaries (``.b3dm``, ``.glb``, …) referenced via relative paths. Presigning
+# every tile would be impractical (hundreds of files, expiry windows, mutated
+# URLs in tileset.json), so we stream them through the backend instead. All
+# authentication and access control happens here; S3 stays private.
+#
+# Layout in S3: ``projects/{project_id}/3d-tiles/...``
+# ---------------------------------------------------------------------------
+
+# Cache lifetime for proxied tiles. Tiles are content-addressed (they don't
+# change once written for a given pipeline run) so 1 hour is conservative.
+# ETag-based revalidation handles changes after expiry.
+_TILE_CACHE_MAX_AGE = 3600
+
+# Maps lowercase file extensions (with leading dot) to the Content-Type the
+# proxy serves them with. Extensions not listed here fall back to
+# application/octet-stream in _tile_content_type().
+_TILE_CONTENT_TYPES: dict[str, str] = {
+    # 3D Tiles tile payload formats - opaque binary.
+    ".b3dm": "application/octet-stream",
+    ".i3dm": "application/octet-stream",
+    ".cmpt": "application/octet-stream",
+    ".pnts": "application/octet-stream",
+    # glTF assets and their binary buffers / sub-tilesets.
+    ".glb": "model/gltf-binary",
+    ".gltf": "model/gltf+json",
+    ".json": "application/json",
+    ".bin": "application/octet-stream",
+    # Image/texture formats that glTF materials may reference.
+    ".png": "image/png",
+    ".jpg": "image/jpeg",
+    ".jpeg": "image/jpeg",
+    ".ktx2": "image/ktx2",
+    ".webp": "image/webp",
+}
+
+
+def _tile_content_type(file_path: str) -> str:
+ suffix = f".{file_path.rsplit('.', 1)[-1].lower()}" if "." in file_path else ""
+ return _TILE_CONTENT_TYPES.get(suffix, "application/octet-stream")
+
+
+def _validate_tile_path(file_path: str) -> str:
+ """Validate and normalise a tile sub-path.
+
+ Rejects empty paths, absolute paths, parent traversal (raw or URL-encoded -
+ FastAPI URL-decodes path params already, so a single ``..`` check suffices),
+ backslashes (Windows-style), and any control characters that could be used
+ to inject CR/LF into S3 keys.
+ """
+ if not file_path:
+ raise HTTPException(
+ status_code=HTTPStatus.BAD_REQUEST, detail="Empty tile path."
+ )
+ if file_path.startswith("/") or "\\" in file_path:
+ raise HTTPException(
+ status_code=HTTPStatus.BAD_REQUEST, detail="Invalid tile path."
+ )
+ # Split on '/' and reject any '..' segment. Substring check would also
+ # match legitimate filenames like ``foo..bar`` so we go segment-wise.
+ for segment in file_path.split("/"):
+ if segment in {"", ".", ".."}:
+ raise HTTPException(
+ status_code=HTTPStatus.BAD_REQUEST, detail="Invalid tile path."
+ )
+ if any(ord(c) < 0x20 for c in file_path):
+ raise HTTPException(
+ status_code=HTTPStatus.BAD_REQUEST, detail="Invalid tile path."
+ )
+ return file_path
+
+
+def _build_tile_response_headers(stat) -> dict[str, str]:
+    """Common cache + integrity headers for tile responses (HEAD and GET).
+
+    ``stat`` is the object returned by MinIO's ``stat_object`` (exposes
+    ``size``, ``etag`` and ``last_modified``).
+    """
+    headers = {
+        "Cache-Control": f"public, max-age={_TILE_CACHE_MAX_AGE}",
+        "Content-Length": str(stat.size),
+    }
+    if stat.etag:
+        # MinIO surfaces the raw S3 ETag (wrapped in quotes when multi-part);
+        # strip the surrounding quote characters so the value is bare.
+        # NOTE(review): the header is therefore emitted WITHOUT the quotes
+        # RFC 7232 prescribes for entity-tags. The GET handler strips quotes
+        # from the client's If-None-Match too, so revalidation against this
+        # backend still matches - but strict intermediaries may not. Confirm
+        # whether a quoted ETag should be emitted end-to-end.
+        headers["ETag"] = stat.etag.strip('"')
+    if stat.last_modified:
+        # IMF-fixdate format (RFC 7231). Hardcoding "GMT" assumes
+        # stat.last_modified is UTC - presumably true for MinIO; verify.
+        headers["Last-Modified"] = stat.last_modified.strftime(
+            "%a, %d %b %Y %H:%M:%S GMT"
+        )
+    return headers
+
+
+async def _stat_3d_tile(project_id: uuid.UUID, file_path: str):
+ """Stat a 3D-tile object. 404 on miss, propagates other errors as 502."""
+ object_key = f"projects/{project_id}/3d-tiles/{file_path}"
+ try:
+ return await run_in_threadpool(
+ s3_client().stat_object, settings.S3_BUCKET_NAME, object_key
+ ), object_key
+ except S3Error as exc:
+ if exc.code in {"NoSuchKey", "NoSuchBucket"}:
+ raise HTTPException(
+ status_code=HTTPStatus.NOT_FOUND,
+ detail=f"3D tile not found: {file_path}",
+ )
+ log.exception(f"S3 error fetching 3D tile {object_key}: {exc}")
+ raise HTTPException(
+ status_code=HTTPStatus.BAD_GATEWAY, detail="Object store error."
+ )
+
+
+@router.head("/{project_id}/3d-tiles/{file_path:path}", tags=["3D Model"])
+async def head_3d_tile(
+ project_id: uuid.UUID,
+ file_path: str,
+ project: Annotated[
+ project_schemas.DbProject, Depends(project_deps.get_project_by_id)
+ ],
+):
+ """Check whether a 3D tile exists without streaming its content.
+
+ Used by the frontend before initialising TilesRenderer so the placeholder
+ can be shown immediately when tiles haven't been generated yet.
+ """
+ file_path = _validate_tile_path(file_path)
+ stat, _ = await _stat_3d_tile(project_id, file_path)
+ return Response(
+ media_type=_tile_content_type(file_path),
+ headers=_build_tile_response_headers(stat),
+ )
+
+
+@router.get("/{project_id}/3d-tiles/{file_path:path}", tags=["3D Model"])
+async def stream_3d_tile(
+ request: Request,
+ project_id: uuid.UUID,
+ file_path: str,
+ project: Annotated[
+ project_schemas.DbProject, Depends(project_deps.get_project_by_id)
+ ],
+):
+ """Stream a single 3D Tiles asset (tileset.json or a tile file) from S3.
+
+ TilesRenderer fetches ``tileset.json`` then resolves all tile paths
+ relative to it. This endpoint proxies every such request through the
+ backend so the private S3 bucket is never exposed directly.
+
+ The response carries ``Cache-Control: public, max-age=3600`` and the
+ object's S3 ETag so the browser can cache aggressively and revalidate
+ cheaply via ``If-None-Match`` on cache expiry.
+ """
+ file_path = _validate_tile_path(file_path)
+ stat, object_key = await _stat_3d_tile(project_id, file_path)
+
+ response_headers = _build_tile_response_headers(stat)
+
+ # Conditional GET: if the client already has the current version, return
+ # 304 immediately and skip the S3 download entirely.
+ if_none_match = request.headers.get("if-none-match")
+ if if_none_match and response_headers.get("ETag"):
+ client_etags = {tag.strip().strip('"') for tag in if_none_match.split(",")}
+ if response_headers["ETag"] in client_etags or "*" in client_etags:
+ return Response(
+ status_code=HTTPStatus.NOT_MODIFIED, headers=response_headers
+ )
+
+ def generate():
+ response = s3_client().get_object(settings.S3_BUCKET_NAME, object_key)
+ try:
+ while True:
+ chunk = response.read(65536)
+ if not chunk:
+ break
+ yield chunk
+ finally:
+ response.close()
+ response.release_conn()
+
+ return StreamingResponse(
+ generate(),
+ media_type=_tile_content_type(file_path),
+ headers=response_headers,
+ )
+
+
# Endpoint not used in production but useful to keep around just for testing the
# queue
@router.post("/test/arq_task")
diff --git a/src/frontend/package.json b/src/frontend/package.json
index d9b6e9d1..122fbbc8 100644
--- a/src/frontend/package.json
+++ b/src/frontend/package.json
@@ -12,6 +12,7 @@
"start": "vite"
},
"dependencies": {
+ "3d-tiles-renderer": "^0.4.24",
"@cyntler/react-doc-viewer": "^1.17.0",
"@geomatico/maplibre-cog-protocol": "^0.3.1",
"@hotosm/gcp-editor": "workspace:*",
@@ -75,6 +76,7 @@
"redux-saga": "^1.3.0",
"tailwind-merge": "^1.14.0",
"tailwindcss-animate": "^1.0.7",
+ "three": "^0.184.0",
"uuid": "^9.0.1"
},
"devDependencies": {
@@ -86,6 +88,7 @@
"@types/react": "^19.0.8",
"@types/react-dom": "^19.0.3",
"@types/react-transition-group": "^4.4.12",
+ "@types/three": "^0.184.1",
"@typescript-eslint/eslint-plugin": "^5.62.0",
"@typescript-eslint/parser": "^5.62.0",
"@vitejs/plugin-react": "^4.3.4",
@@ -102,7 +105,8 @@
"prettier-plugin-tailwindcss": "^0.5.14",
"tailwindcss": "^3.4.17",
"typescript": "^5.9.2",
- "vite": "^5.4.11"
+ "vite": "^5.4.11",
+ "vite-plugin-static-copy": "^2.3.2"
},
"peerDependencies": {
"@awesome.me/webawesome": "3.2.1"
diff --git a/src/frontend/src/components/DroneOperatorTask/DescriptionSection/DroneImageProcessingWorkflow/ImageReview.tsx b/src/frontend/src/components/DroneOperatorTask/DescriptionSection/DroneImageProcessingWorkflow/ImageReview.tsx
index e5488de7..18cd6c30 100644
--- a/src/frontend/src/components/DroneOperatorTask/DescriptionSection/DroneImageProcessingWorkflow/ImageReview.tsx
+++ b/src/frontend/src/components/DroneOperatorTask/DescriptionSection/DroneImageProcessingWorkflow/ImageReview.tsx
@@ -1395,7 +1395,7 @@ ${safeReason && ["rejected", "unmatched", "invalid_exif", "duplicate"].includes(
)}
- {/* Rubber-band selection rectangle — updated via ref to avoid re-renders */}
+ {/* Rubber-band selection rectangle - updated via ref to avoid re-renders */}
}) => {
);
return getBbox(tasksCollectiveGeojson as FeatureCollection);
}
- // No tasks yet — fall back to the project outline bbox
+ // No tasks yet - fall back to the project outline bbox
return projectData?.outline?.properties?.bbox ?? null;
}, [tasksData, projectData?.outline]);
diff --git a/src/frontend/src/routes/appRoutes.ts b/src/frontend/src/routes/appRoutes.ts
index 99ecd52f..36b3d40f 100644
--- a/src/frontend/src/routes/appRoutes.ts
+++ b/src/frontend/src/routes/appRoutes.ts
@@ -16,6 +16,7 @@ const UpdateUserProfile = lazy(() => import("@Views/UpdateUserProfile"));
const RegulatorsApprovalPage = lazy(() => import("@Views/RegulatorsApprovalPage"));
const Tutorials = lazy(() => import("@Views/Tutorial"));
const ImportPage = lazy(() => import("@Views/Import"));
+const View3DModel = lazy(() => import("@Views/View3DModel"));
const appRoutes: IRoute[] = [
...userRoutes,
@@ -97,6 +98,12 @@ const appRoutes: IRoute[] = [
component: ImportPage,
authenticated: true,
},
+ {
+ path: "/projects/:id/3d-model",
+ name: "3D Model Viewer",
+ component: View3DModel,
+ authenticated: false,
+ },
];
export default appRoutes;
diff --git a/src/frontend/src/views/IndividualProject/index.tsx b/src/frontend/src/views/IndividualProject/index.tsx
index 84799808..8e132d05 100644
--- a/src/frontend/src/views/IndividualProject/index.tsx
+++ b/src/frontend/src/views/IndividualProject/index.tsx
@@ -237,6 +237,17 @@ const IndividualProject = () => {
>
GCP Editor
+ {projectData?.image_processing_status === "SUCCESS" && (
+
+ )}