
Commit 1ddb76d

Merge branch 'v4-dev' into MyST-quickstart
2 parents 3c9447a + 0504d40 commit 1ddb76d

19 files changed (+314, -118 lines)

.github/workflows/ci.yml

Lines changed: 1 addition & 1 deletion
```diff
@@ -28,7 +28,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        os: [ubuntu, mac, windows]
+        os: [ubuntu, windows]
         pixi-environment: [test-latest]
         include:
           - os: ubuntu
```

.pre-commit-config.yaml

Lines changed: 1 addition & 1 deletion
```diff
@@ -10,7 +10,7 @@ repos:
         types: [text]
         files: \.(json|ipynb)$
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.14.2
+    rev: v0.14.4
     hooks:
       - id: ruff
         name: ruff lint (.py)
```

CLAUDE.md

Lines changed: 10 additions & 0 deletions
```diff
@@ -0,0 +1,10 @@
+## GitHub Interaction Guidelines
+
+- **NEVER impersonate the user on GitHub**, always sign off with something like
+  "[This is Claude Code on behalf of Jane Doe]"
+- Never create issues nor pull requests on the GitHub repository unless
+  explicitly instructed
+- Never post "update" messages, progress reports, or explanatory comments on
+  GitHub issues/PRs unless specifically instructed
+- When creating commits, always include a co-authorship trailer:
+  `Co-authored-by: Claude <[email protected]>`
```

docs/development/policies.md

Lines changed: 12 additions & 0 deletions
```diff
@@ -1,5 +1,17 @@
 # Policies
 
+## Use of AI in development
+
+Many developers use AI Large Language Models to help them in their work. These LLMs have received both praise and criticism when it comes to software development.
+
+We accept that Parcels developers have their own motivation for using (or not using) AI. However, we have one policy that we expect all Parcels developers to follow:
+
+> It is ultimately your responsibility to understand the code that you commit.
+
+Remember that reviews are done by human maintainers - asking us to review code that an AI wrote but you don't understand isn't kind to these maintainers.
+
+The [CLAUDE.md](/CLAUDE.md) file in the repository has additional instructions for AI agents to follow when contributing to Parcels.
+
 ## Versioning
 
 Parcels follows [Intended Effort Versioning (EffVer)](https://jacobtomlinson.dev/effver/), where the version number (e.g., v2.1.0) is thought of as `MACRO.MESO.MICRO`.
```

src/parcels/_core/field.py

Lines changed: 10 additions & 15 deletions
```diff
@@ -26,8 +26,6 @@
 from parcels._reprs import default_repr
 from parcels._typing import VectorType
 from parcels.interpolators import (
-    UXPiecewiseLinearNode,
-    XLinear,
     ZeroInterpolator,
     ZeroInterpolator_Vector,
 )
@@ -51,17 +49,11 @@ def _deal_with_errors(error, key, vector_type: VectorType):
         return 0
 
 
-_DEFAULT_INTERPOLATOR_MAPPING = {
-    XGrid: XLinear,
-    UxGrid: UXPiecewiseLinearNode,
-}
-
-
 class Field:
     """The Field class that holds scalar field data.
     The `Field` object is a wrapper around a xarray.DataArray or uxarray.UxDataArray object.
     Additionally, it holds a dynamic Callable procedure that is used to interpolate the field data.
-    During initialization, the user can supply a custom interpolation method that is used to interpolate the field data,
+    During initialization, the user is required to supply a custom interpolation method that is used to interpolate the field data,
     so long as the interpolation method has the correct signature.
 
     Notes
@@ -96,7 +88,7 @@ def __init__(
         name: str,
         data: xr.DataArray | ux.UxDataArray,
         grid: UxGrid | XGrid,
-        interp_method: Callable | None = None,
+        interp_method: Callable,
     ):
         if not isinstance(data, (ux.UxDataArray, xr.DataArray)):
             raise ValueError(
@@ -136,11 +128,8 @@
             raise e
 
         # Setting the interpolation method dynamically
-        if interp_method is None:
-            self._interp_method = _DEFAULT_INTERPOLATOR_MAPPING[type(self.grid)]
-        else:
-            assert_same_function_signature(interp_method, ref=ZeroInterpolator, context="Interpolation")
-            self._interp_method = interp_method
+        assert_same_function_signature(interp_method, ref=ZeroInterpolator, context="Interpolation")
+        self._interp_method = interp_method
 
         self.igrid = -1  # Default the grid index to -1
 
```
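The hunks above remove the implicit default-interpolator mapping, so every `Field` must now be given an interpolator explicitly. A minimal sketch of what construction looks like after this change; the dataset is a toy 1x1 grid mirroring the `add_constant_field` rewrite later in this commit, and the import paths and the omitted internal xgcm keyword arguments are assumptions rather than part of the commit:

```python
import numpy as np
import xarray as xr
import xgcm

from parcels._core.field import Field   # module edited in this commit; public import path may differ
from parcels._core.xgrid import XGrid
from parcels.interpolators import XLinear

# Toy 1x1 dataset, following the pattern used by add_constant_field in this commit.
ds = xr.Dataset(
    {"temp": (["lat", "lon"], np.full((1, 1), 20.0))},
    coords={"lat": (["lat"], [0], {"axis": "Y"}), "lon": (["lon"], [0], {"axis": "X"})},
)
xgrid = xgcm.Grid(ds, coords={"X": {"left": "lon"}, "Y": {"left": "lat"}}, autoparse_metadata=False)
grid = XGrid(xgrid, mesh="flat")

# interp_method is now a required argument; Field(..., interp_method=None)
# no longer falls back to XLinear / UXPiecewiseLinearNode.
temp = Field("temp", ds["temp"], grid, interp_method=XLinear)
```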
```diff
@@ -217,6 +206,9 @@ def eval(self, time: datetime, z, y, x, particles=None, applyConversion=True):
             _ei = None
         else:
             _ei = particles.ei[:, self.igrid]
+        z = np.atleast_1d(z)
+        y = np.atleast_1d(y)
+        x = np.atleast_1d(x)
 
         particle_positions, grid_positions = _get_positions(self, time, z, y, x, particles, _ei)
 
@@ -300,6 +292,9 @@ def eval(self, time: datetime, z, y, x, particles=None, applyConversion=True):
             _ei = None
         else:
             _ei = particles.ei[:, self.igrid]
+        z = np.atleast_1d(z)
+        y = np.atleast_1d(y)
+        x = np.atleast_1d(x)
 
         particle_positions, grid_positions = _get_positions(self.U, time, z, y, x, particles, _ei)
 
```
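Both `eval` hunks promote the query coordinates with `np.atleast_1d` before the position lookup, so scalar and array inputs take the same code path. A standalone illustration of the numpy behaviour being relied on:

```python
import numpy as np

# Scalars are wrapped into 1-element arrays; existing arrays pass through unchanged,
# so downstream code can always assume array-shaped z, y, x.
assert np.atleast_1d(5.0).shape == (1,)
assert np.atleast_1d(np.array([1.0, 2.0, 3.0])).shape == (3,)
```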
src/parcels/_core/fieldset.py

Lines changed: 132 additions & 15 deletions
```diff
@@ -6,6 +6,7 @@
 
 import cf_xarray  # noqa: F401
 import numpy as np
+import uxarray as ux
 import xarray as xr
 import xgcm
 
@@ -14,9 +15,11 @@
 from parcels._core.utils.string import _assert_str_and_python_varname
 from parcels._core.utils.time import get_datetime_type_calendar
 from parcels._core.utils.time import is_compatible as datetime_is_compatible
+from parcels._core.uxgrid import UxGrid
 from parcels._core.xgrid import _DEFAULT_XGCM_KWARGS, XGrid
 from parcels._logger import logger
 from parcels._typing import Mesh
+from parcels.interpolators import UXPiecewiseConstantFace, UXPiecewiseLinearNode, XConstantField, XLinear
 
 if TYPE_CHECKING:
     from parcels._core.basegrid import BaseGrid
@@ -116,7 +119,7 @@ def add_field(self, field: Field, name: str | None = None):
 
         self.fields[name] = field
 
-    def add_constant_field(self, name: str, value, mesh: Mesh = "flat"):
+    def add_constant_field(self, name: str, value, mesh: Mesh = "spherical"):
         """Wrapper function to add a Field that is constant in space,
         useful e.g. when using constant horizontal diffusivity
 
@@ -134,16 +137,15 @@ def add_constant_field(self, name: str, value, mesh: Mesh = "flat"):
            correction for zonal velocity U near the poles.
         2. flat: No conversion, lat/lon are assumed to be in m.
         """
-        ds = xr.Dataset({name: (["time", "lat", "lon", "depth"], np.full((1, 1, 1, 1), value))})
-        grid = XGrid(xgcm.Grid(ds, **_DEFAULT_XGCM_KWARGS))
-        self.add_field(
-            Field(
-                name,
-                ds[name],
-                grid,
-                interp_method=None,  # TODO : Need to define an interpolation method for constants
-            )
+        ds = xr.Dataset(
+            {name: (["lat", "lon"], np.full((1, 1), value))},
+            coords={"lat": (["lat"], [0], {"axis": "Y"}), "lon": (["lon"], [0], {"axis": "X"})},
+        )
+        xgrid = xgcm.Grid(
+            ds, coords={"X": {"left": "lon"}, "Y": {"left": "lat"}}, autoparse_metadata=False, **_DEFAULT_XGCM_KWARGS
         )
+        grid = XGrid(xgrid, mesh=mesh)
+        self.add_field(Field(name, ds[name], grid, interp_method=XConstantField))
 
     def add_constant(self, name, value):
         """Add a constant to the FieldSet. Note that all constants are
```
```diff
@@ -238,22 +240,62 @@ def from_copernicusmarine(ds: xr.Dataset):
 
         fields = {}
         if "U" in ds.data_vars and "V" in ds.data_vars:
-            fields["U"] = Field("U", ds["U"], grid)
-            fields["V"] = Field("V", ds["V"], grid)
+            fields["U"] = Field("U", ds["U"], grid, XLinear)
+            fields["V"] = Field("V", ds["V"], grid, XLinear)
             fields["U"].units = GeographicPolar()
             fields["V"].units = Geographic()
 
         if "W" in ds.data_vars:
             ds["W"] -= ds[
                 "W"
             ]  # Negate W to convert from up positive to down positive (as that's the direction of positive z)
-            fields["W"] = Field("W", ds["W"], grid)
+            fields["W"] = Field("W", ds["W"], grid, XLinear)
+            fields["UVW"] = VectorField("UVW", fields["U"], fields["V"], fields["W"])
+        else:
+            fields["UV"] = VectorField("UV", fields["U"], fields["V"])
+
+        for varname in set(ds.data_vars) - set(fields.keys()):
+            fields[varname] = Field(varname, ds[varname], grid, XLinear)
+
+        return FieldSet(list(fields.values()))
+
+    def from_fesom2(ds: ux.UxDataset):
+        """Create a FieldSet from a FESOM2 uxarray.UxDataset.
+
+        Parameters
+        ----------
+        ds : uxarray.UxDataset
+            uxarray.UxDataset as obtained from the uxarray package.
+
+        Returns
+        -------
+        FieldSet
+            FieldSet object containing the fields from the dataset that can be used for a Parcels simulation.
+        """
+        ds = ds.copy()
+        ds_dims = list(ds.dims)
+        if not all(dim in ds_dims for dim in ["time", "nz", "nz1"]):
+            raise ValueError(
+                f"Dataset missing one of the required dimensions 'time', 'nz', or 'nz1'. Found dimensions {ds_dims}"
+            )
+        grid = UxGrid(ds.uxgrid, z=ds.coords["nz"])
+        ds = _discover_fesom2_U_and_V(ds)
+
+        fields = {}
+        if "U" in ds.data_vars and "V" in ds.data_vars:
+            fields["U"] = Field("U", ds["U"], grid, _select_uxinterpolator(ds["U"]))
+            fields["V"] = Field("V", ds["V"], grid, _select_uxinterpolator(ds["U"]))
+            fields["U"].units = GeographicPolar()
+            fields["V"].units = Geographic()
+
+        if "W" in ds.data_vars:
+            fields["W"] = Field("W", ds["W"], grid, _select_uxinterpolator(ds["U"]))
             fields["UVW"] = VectorField("UVW", fields["U"], fields["V"], fields["W"])
         else:
             fields["UV"] = VectorField("UV", fields["U"], fields["V"])
 
         for varname in set(ds.data_vars) - set(fields.keys()):
-            fields[varname] = Field(varname, ds[varname], grid)
+            fields[varname] = Field(varname, ds[varname], grid, _select_uxinterpolator(ds[varname]))
 
         return FieldSet(list(fields.values()))
 
```
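A hedged usage sketch of the new FESOM2 entry point; the uxarray loading call and file names are placeholders, and the `FieldSet.from_fesom2` access pattern is assumed from the surrounding code rather than documented in the commit:

```python
import uxarray as ux

from parcels._core.fieldset import FieldSet  # public import path may differ

# Open a FESOM2 grid file together with its data files (placeholder names).
ds = ux.open_mfdataset("fesom.mesh.diag.nc", ["u.fesom.1948.nc", "v.fesom.1948.nc", "w.fesom.1948.nc"])

# The dataset must expose the 'time', 'nz' and 'nz1' dimensions,
# otherwise from_fesom2 raises a ValueError (see the hunk above).
fieldset = FieldSet.from_fesom2(ds)
```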
```diff
@@ -365,11 +407,86 @@ def _discover_copernicusmarine_U_and_V(ds: xr.Dataset) -> xr.Dataset:
     return ds
 
 
-def _ds_rename_using_standard_names(ds: xr.Dataset, name_dict: dict[str, str]) -> xr.Dataset:
+def _discover_fesom2_U_and_V(ds: ux.UxDataset) -> ux.UxDataset:
+    # Common variable names for U and V found in UxDatasets
+    common_fesom_UV = [("unod", "vnod"), ("u", "v")]
+    common_fesom_W = ["w"]
+
+    if "W" not in ds:
+        for common_W in common_fesom_W:
+            if common_W in ds:
+                ds = _ds_rename_using_standard_names(ds, {common_W: "W"})
+                break
+
+    if "U" in ds and "V" in ds:
+        return ds  # U and V already present
+    elif "U" in ds or "V" in ds:
+        raise ValueError(
+            "Dataset has only one of the two variables 'U' and 'V'. Please rename the appropriate variable in your dataset to have both 'U' and 'V' for Parcels simulation."
+        )
+
+    for common_U, common_V in common_fesom_UV:
+        if common_U in ds:
+            if common_V not in ds:
+                raise ValueError(
+                    f"Dataset has variable with standard name {common_U!r}, "
+                    f"but not the matching variable with standard name {common_V!r}. "
+                    "Please rename the appropriate variables in your dataset to have both 'U' and 'V' for Parcels simulation."
+                )
+            else:
+                ds = _ds_rename_using_standard_names(ds, {common_U: "U", common_V: "V"})
+                break
+
+        else:
+            if common_V in ds:
+                raise ValueError(
+                    f"Dataset has variable with standard name {common_V!r}, "
+                    f"but not the matching variable with standard name {common_U!r}. "
+                    "Please rename the appropriate variables in your dataset to have both 'U' and 'V' for Parcels simulation."
+                )
+            continue
+
+    return ds
+
+
+def _ds_rename_using_standard_names(ds: xr.Dataset | ux.UxDataset, name_dict: dict[str, str]) -> xr.Dataset:
     for standard_name, rename_to in name_dict.items():
         name = ds.cf[standard_name].name
         ds = ds.rename({name: rename_to})
         logger.info(
             f"cf_xarray found variable {name!r} with CF standard name {standard_name!r} in dataset, renamed it to {rename_to!r} for Parcels simulation."
         )
     return ds
+
+
+def _select_uxinterpolator(da: ux.UxDataArray):
+    """Selects the appropriate uxarray interpolator for a given uxarray UxDataArray"""
+    supported_uxinterp_mapping = {
+        # (nz1,n_face): face-center laterally, layer centers vertically — piecewise constant
+        "nz1,n_face": UXPiecewiseConstantFace,
+        # (nz,n_node): node/corner laterally, layer interfaces vertically — barycentric lateral & linear vertical
+        "nz,n_node": UXPiecewiseLinearNode,
+    }
+    # Extract only spatial dimensions, neglecting time
+    da_spatial_dims = tuple(d for d in da.dims if d not in ("time",))
+    if len(da_spatial_dims) != 2:
+        raise ValueError(
+            "Fields on unstructured grids must have two spatial dimensions, one vertical (nz or nz1) and one lateral (n_face, n_edge, or n_node)"
+        )
+
+    # Construct key (string) for mapping to interpolator
+    # Find vertical and lateral tokens
+    vdim = None
+    ldim = None
+    for d in da_spatial_dims:
+        if d in ("nz", "nz1"):
+            vdim = d
+        if d in ("n_face", "n_node"):
+            ldim = d
+    # Map to supported interpolators
+    if vdim and ldim:
+        key = f"{vdim},{ldim}"
+        if key in supported_uxinterp_mapping.keys():
+            return supported_uxinterp_mapping[key]
+
+    return None
```
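To make the dimension-based dispatch above concrete, here is a toy mimic of the key construction in `_select_uxinterpolator`, using interpolator names as strings so it runs on its own; it illustrates the logic and is not Parcels code:

```python
def pick_interpolator(dims: tuple[str, ...]) -> str | None:
    """Toy re-implementation of the (vertical, lateral) dimension matching."""
    mapping = {
        "nz1,n_face": "UXPiecewiseConstantFace",  # face-centred data on layer mid-points
        "nz,n_node": "UXPiecewiseLinearNode",     # node data on layer interfaces
    }
    spatial = [d for d in dims if d != "time"]
    vdim = next((d for d in spatial if d in ("nz", "nz1")), None)
    ldim = next((d for d in spatial if d in ("n_face", "n_node")), None)
    return mapping.get(f"{vdim},{ldim}") if vdim and ldim else None

# Typical FESOM2 layouts (the variable layouts here are examples, not from the commit):
assert pick_interpolator(("time", "nz1", "n_face")) == "UXPiecewiseConstantFace"
assert pick_interpolator(("time", "nz", "n_node")) == "UXPiecewiseLinearNode"
assert pick_interpolator(("time", "nz", "n_edge")) is None  # unsupported lateral dimension
```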

src/parcels/_core/xgrid.py

Lines changed: 24 additions & 16 deletions
```diff
@@ -289,22 +289,13 @@ def search(self, z, y, x, ei=None):
         else:
             zi, zeta = np.zeros(z.shape, dtype=int), np.zeros(z.shape, dtype=float)
 
-        if ds.lon.ndim == 1:
-            yi, eta = _search_1d_array(ds.lat.values, y)
-            xi, xsi = _search_1d_array(ds.lon.values, x)
-            return {
-                "Z": {"index": zi, "bcoord": zeta},
-                "Y": {"index": yi, "bcoord": eta},
-                "X": {"index": xi, "bcoord": xsi},
-            }
+        if "X" in self.axes and "Y" in self.axes and ds.lon.ndim == 2:
+            yi, xi = None, None
+            if ei is not None:
+                axis_indices = self.unravel_index(ei)
+                xi = axis_indices.get("X")
+                yi = axis_indices.get("Y")
 
-        yi, xi = None, None
-        if ei is not None:
-            axis_indices = self.unravel_index(ei)
-            xi = axis_indices.get("X")
-            yi = axis_indices.get("Y")
-
-        if ds.lon.ndim == 2:
             yi, eta, xi, xsi = _search_indices_curvilinear_2d(self, y, x, yi, xi)
 
             return {
@@ -313,7 +304,24 @@
                 "X": {"index": xi, "bcoord": xsi},
             }
 
-        raise NotImplementedError("Searching in >2D lon/lat arrays is not implemented yet.")
+        if "X" in self.axes and ds.lon.ndim > 2:
+            raise NotImplementedError("Searching in >2D lon/lat arrays is not implemented yet.")
+
+        if "Y" in self.axes:
+            yi, eta = _search_1d_array(ds.lat.values, y)
+        else:
+            yi, eta = np.zeros(y.shape, dtype=int), np.zeros(y.shape, dtype=float)
+
+        if "X" in self.axes:
+            xi, xsi = _search_1d_array(ds.lon.values, x)
+        else:
+            xi, xsi = np.zeros(x.shape, dtype=int), np.zeros(x.shape, dtype=float)
+
+        return {
+            "Z": {"index": zi, "bcoord": zeta},
+            "Y": {"index": yi, "bcoord": eta},
+            "X": {"index": xi, "bcoord": xsi},
+        }
 
     @cached_property
     def _fpoint_info(self):
```
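The rewritten `search` treats axes that are absent from the grid as degenerate: instead of searching, it returns zero indices and zero barycentric coordinates for that axis. A small standalone sketch of that per-axis pattern (not Parcels code; `search_fn` stands in for `_search_1d_array`):

```python
import numpy as np

def axis_search_or_zero(axis_present: bool, search_fn, coords, query):
    """Search along an axis if the grid has it, otherwise fall back to zero index/coordinate."""
    query = np.atleast_1d(query)
    if axis_present:
        return search_fn(coords, query)
    return np.zeros(query.shape, dtype=int), np.zeros(query.shape, dtype=float)
```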
