1 change: 1 addition & 0 deletions docs/community/v4-migration.md
@@ -14,6 +14,7 @@ Version 4 of Parcels is unreleased at the moment. The information in this migrat
- The `time` argument in the Kernel signature has been removed from the Kernel API, so it can't be used. Use `particle.time` instead.
- The `particle` argument in the Kernel signature has been renamed to `particles`.
- `math` functions should be replaced with array compatible equivalents (e.g., `math.sin` -> `np.sin`). Instead of `ParcelsRandom` you should use numpy's random functions.
+ - `particle.depth` has been changed to `particles.z` to be consistent with the [CF conventions for trajectory data](https://cfconventions.org/cf-conventions/cf-conventions.html#trajectory-data), and to make Parcels also generalizable to atmospheric contexts.

## FieldSet

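Taken together, the Kernel rules in the migration-guide hunk above might look as follows in v4. This is a minimal sketch assuming only the renames listed in that guide; the kernel name and the constants are illustrative, not part of this PR:

```python
import numpy as np

def SinkAndDrift(particles, fieldset):  # hypothetical kernel, v4 style
    # plural `particles`, and no `time` argument in the signature
    particles.dz += 0.01  # vertical displacement; was particle.ddepth in v3
    # array-compatible math: np.sin instead of math.sin
    particles.dlon += 1e-4 * np.sin(np.deg2rad(particles.lat))
```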
4 changes: 2 additions & 2 deletions parcels/_core/field.py
@@ -233,7 +233,7 @@ def __getitem__(self, key):
self._check_velocitysampling()
try:
if isinstance(key, KernelParticle):
- return self.eval(key.time, key.depth, key.lat, key.lon, key)
+ return self.eval(key.time, key.z, key.lat, key.lon, key)
else:
return self.eval(*key)
except tuple(AllParcelsErrorCodes.keys()) as error:
@@ -334,7 +334,7 @@ def eval(self, time: datetime, z, y, x, particles=None, applyConversion=True):
def __getitem__(self, key):
try:
if isinstance(key, KernelParticle):
- return self.eval(key.time, key.depth, key.lat, key.lon, key)
+ return self.eval(key.time, key.z, key.lat, key.lon, key)
else:
return self.eval(*key)
except tuple(AllParcelsErrorCodes.keys()) as error:
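Both `__getitem__` hunks above mean a field can be sampled either with a particle object or with an explicit `(time, z, lat, lon)` tuple. A hedged sketch of the two call styles inside a kernel; the `T` field and the `temp` particle variable are assumed names, not part of this diff:

```python
def SampleTemperature(particles, fieldset):  # illustrative kernel
    # particle-based sampling: dispatches to eval(time, z, lat, lon, particle)
    particles.temp = fieldset.T[particles]
    # the explicit positional form is equivalent:
    # particles.temp = fieldset.T[particles.time, particles.z, particles.lat, particles.lon]
```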
2 changes: 1 addition & 1 deletion parcels/_core/fieldset.py
@@ -240,7 +240,7 @@ def from_copernicusmarine(ds: xr.Dataset):
if "W" in ds.data_vars:
ds["W"] -= ds[
"W"
] # Negate W to convert from up positive to down positive (as that's the direction of positive depth)
] # Negate W to convert from up positive to down positive (as that's the direction of positive z)
fields["W"] = Field("W", ds["W"], grid)
fields["UVW"] = VectorField("UVW", fields["U"], fields["V"], fields["W"])
else:
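The negation above is a sign-convention fix: Copernicus W is positive-up, while Parcels treats positive z as down. A toy xarray sketch of the same flip (the data values are made up):

```python
import numpy as np
import xarray as xr

ds = xr.Dataset({"W": ("z", np.array([0.5, -0.2]))})  # positive-up velocities
ds["W"] = -ds["W"]  # now positive-down, matching Parcels' positive-down z axis
```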
6 changes: 3 additions & 3 deletions parcels/_core/kernel.py
@@ -120,11 +120,11 @@ def Setcoords(particles, fieldset): # pragma: no cover

particles.lon += particles.dlon
particles.lat += particles.dlat
- particles.depth += particles.ddepth
+ particles.z += particles.dz

particles.dlon = 0
particles.dlat = 0
- particles.ddepth = 0
+ particles.dz = 0

particles.time = particles.time_nextloop

@@ -286,6 +286,6 @@ def execute(self, pset, endtime, dt):
if error_code == StatusCode.ErrorTimeExtrapolation:
error_func(pset[inds].time)
else:
- error_func(pset[inds].depth, pset[inds].lat, pset[inds].lon)
+ error_func(pset[inds].z, pset[inds].lat, pset[inds].lon)

return pset
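The `Setcoords` hunk above shows the displacement-accumulation pattern: kernels write into `dlon`/`dlat`/`dz`, and positions are committed (and the deltas zeroed) once per loop iteration. The same pattern in plain numpy, as a minimal sketch:

```python
import numpy as np

lon = np.zeros(3)
dlon = np.array([0.1, 0.2, 0.3])  # accumulated by kernels during the loop
z = np.full(3, 5.0)
dz = np.full(3, 1.0)

# commit displacements, then reset, as Setcoords does
lon += dlon   # particles.lon += particles.dlon
z += dz       # particles.z += particles.dz (renamed from depth/ddepth)
dlon[:] = 0.0
dz[:] = 0.0
```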
6 changes: 3 additions & 3 deletions parcels/_core/particle.py
@@ -176,13 +176,13 @@ def get_default_particle(spatial_dtype: np.float32 | np.float64) -> ParticleClas
attrs={"standard_name": "latitude", "units": "degrees_north", "axis": "Y"},
),
Variable(
"depth",
"z",
dtype=spatial_dtype,
attrs={"standard_name": "depth", "units": "m", "positive": "down"},
attrs={"standard_name": "vertical coordinate", "units": "m", "positive": "down"},
),
Variable("dlon", dtype=spatial_dtype, to_write=False),
Variable("dlat", dtype=spatial_dtype, to_write=False),
Variable("ddepth", dtype=spatial_dtype, to_write=False),
Variable("dz", dtype=spatial_dtype, to_write=False),
Variable(
"time",
dtype=_SAME_AS_FIELDSET_TIME_INTERVAL.VALUE,
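The new attrs travel with the variable into the output file, so downstream tools can orient the vertical axis from metadata instead of guessing. A toy illustration of what the written `z` variable carries; this is plain xarray usage, not the Parcels writer itself:

```python
import numpy as np
import xarray as xr

z = xr.DataArray(
    np.array([5.0, 7.5, 10.0], dtype=np.float32),  # made-up trajectory positions
    dims=["obs"],
    attrs={"standard_name": "vertical coordinate", "units": "m", "positive": "down"},
    name="z",
)
print(z.attrs["positive"])  # "down": readers need not guess the axis direction
```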
28 changes: 10 additions & 18 deletions parcels/_core/particlefile.py
@@ -126,12 +126,6 @@ def store(self):
def create_new_zarrfile(self):
return self._create_new_zarrfile

- def _convert_varout_name(self, var):
-     if var == "depth":
-         return "z"
-     else:
-         return var

def _extend_zarr_dims(self, Z, store, dtype, axis):
if axis == 1:
a = np.full((Z.shape[0], self.chunks[1]), _DATATYPES_TO_FILL_VALUES[dtype], dtype=dtype)
@@ -214,8 +208,7 @@ def _write_particle_data(self, *, particle_data, pclass, time_interval, time, in
obs = np.zeros((self._maxids), dtype=np.int32)
for var in vars_to_write:
dtype = _maybe_convert_time_dtype(var.dtype)
- varout = self._convert_varout_name(var.name)
- if varout not in ["trajectory"]:  # because 'trajectory' is written as coordinate
+ if var.name not in ["trajectory"]:  # because 'trajectory' is written as coordinate
if var.to_write == "once":
data = np.full(
(arrsize[0],),
@@ -228,25 +221,24 @@
data = np.full(arrsize, _DATATYPES_TO_FILL_VALUES[dtype], dtype=dtype)
data[ids, 0] = particle_data[var.name][indices_to_write]
dims = ["trajectory", "obs"]
- ds[varout] = xr.DataArray(data=data, dims=dims, attrs=attrs[var.name])
- ds[varout].encoding["chunks"] = self.chunks[0] if var.to_write == "once" else self.chunks  # type: ignore[index]
+ ds[var.name] = xr.DataArray(data=data, dims=dims, attrs=attrs[var.name])
+ ds[var.name].encoding["chunks"] = self.chunks[0] if var.to_write == "once" else self.chunks  # type: ignore[index]
ds.to_zarr(store, mode="w")
self._create_new_zarrfile = False
else:
Z = zarr.group(store=store, overwrite=False)
obs = particle_data["obs_written"][indices_to_write]
for var in vars_to_write:
dtype = _maybe_convert_time_dtype(var.dtype)
- varout = self._convert_varout_name(var.name)
- if self._maxids > Z[varout].shape[0]:
-     self._extend_zarr_dims(Z[varout], store, dtype=dtype, axis=0)
+ if self._maxids > Z[var.name].shape[0]:
+     self._extend_zarr_dims(Z[var.name], store, dtype=dtype, axis=0)
if var.to_write == "once":
if len(once_ids) > 0:
- Z[varout].vindex[ids_once] = particle_data[var.name][indices_to_write_once]
+ Z[var.name].vindex[ids_once] = particle_data[var.name][indices_to_write_once]
else:
- if max(obs) >= Z[varout].shape[1]:  # type: ignore[type-var]
-     self._extend_zarr_dims(Z[varout], store, dtype=dtype, axis=1)
- Z[varout].vindex[ids, obs] = particle_data[var.name][indices_to_write]
+ if max(obs) >= Z[var.name].shape[1]:  # type: ignore[type-var]
+     self._extend_zarr_dims(Z[var.name], store, dtype=dtype, axis=1)
+ Z[var.name].vindex[ids, obs] = particle_data[var.name][indices_to_write]

particle_data["obs_written"][indices_to_write] = obs + 1

@@ -262,7 +254,7 @@ def write_latest_locations(self, pset, time):
time :
Time at which to write ParticleSet. Note that typically this would be pset.time_nextloop
"""
for var in ["lon", "lat", "depth"]:
for var in ["lon", "lat", "z"]:
pset._data[f"{var}"] += pset._data[f"d{var}"]
pset._data["time"] = pset._data["time_nextloop"]
self.write(pset, time)
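With `_convert_varout_name` gone, every variable is written to the zarr store under its own name; the `depth` → `z` rename on write is no longer needed because the particle variable is already called `z`. A hedged sketch of reading the output (the path is illustrative, and the dimension names follow the `["trajectory", "obs"]` dims in this diff):

```python
import xarray as xr

ds = xr.open_zarr("output.zarr")  # hypothetical Parcels output store
print(ds["z"].isel(trajectory=0).values)  # vertical positions of particle 0
```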
39 changes: 14 additions & 25 deletions parcels/_core/particleset.py
@@ -39,16 +39,12 @@ class ParticleSet:
List of initial longitude values for particles
lat :
List of initial latitude values for particles
- depth :
-     Optional list of initial depth values for particles. Default is 0m
+ z :
+     Optional list of initial z values for particles. Default is 0m
time :
Optional list of initial time values for particles. Default is fieldset.U.grid.time[0]
repeatdt : datetime.timedelta or float, optional
Optional interval on which to repeat the release of the ParticleSet. Either timedelta object, or float in seconds.
- lonlatdepth_dtype :
-     Floating precision for lon, lat, depth particle coordinates.
-     It is either np.float32 or np.float64. Default is np.float32 if fieldset.U.interp_method is 'linear'
-     and np.float64 if the interpolation method is 'cgrid_velocity'
trajectory_ids :
Optional list of "trajectory" values (integers) for the particle IDs
partition_function :
@@ -66,7 +62,7 @@ def __init__(
pclass=Particle,
lon=None,
lat=None,
- depth=None,
+ z=None,
time=None,
trajectory_ids=None,
**kwargs,
@@ -84,15 +80,15 @@
if trajectory_ids is None:
trajectory_ids = np.arange(lon.size)

- if depth is None:
-     mindepth = 0
+ if z is None:
+     minz = 0
for field in self.fieldset.fields.values():
if field.grid.depth is not None:
-     mindepth = min(mindepth, field.grid.depth[0])
- depth = np.ones(lon.size) * mindepth
+     minz = min(minz, field.grid.depth[0])
+ z = np.ones(lon.size) * minz
else:
- depth = _convert_to_flat_array(depth)
- assert lon.size == lat.size and lon.size == depth.size, "lon, lat, depth don't all have the same lenghts"
+ z = _convert_to_flat_array(z)
+ assert lon.size == lat.size and lon.size == z.size, "lon, lat, z don't all have the same lengths"

if time is None or len(time) == 0:
# do not set a time yet (because sign_dt not known)
@@ -106,7 +102,7 @@
raise TypeError("particle time must be a datetime, timedelta, or date object")
time = np.repeat(time, lon.size) if time.size == 1 else time

- assert lon.size == time.size, "time and positions (lon, lat, depth) do not have the same lengths."
+ assert lon.size == time.size, "time and positions (lon, lat, z) do not have the same lengths."

if fieldset.time_interval:
_warn_particle_times_outside_fieldset_time_bounds(time, fieldset.time_interval)
@@ -115,7 +111,7 @@
if kwvar not in ["partition_function"]:
kwargs[kwvar] = _convert_to_flat_array(kwargs[kwvar])
assert lon.size == kwargs[kwvar].size, (
f"{kwvar} and positions (lon, lat, depth) don't have the same lengths."
f"{kwvar} and positions (lon, lat, z) don't have the same lengths."
)

self._data = create_particle_data(
@@ -126,7 +122,7 @@
initial=dict(
lon=lon,
lat=lat,
- depth=depth,
+ z=z,
time=time,
time_nextloop=time,
trajectory=trajectory_ids,
@@ -182,13 +178,6 @@ def __setattr__(self, name, value):
else:
object.__setattr__(self, name, value)

- @staticmethod
- def lonlatdepth_dtype_from_field_interp_method(field):
-     # TODO update this when now interp methods are implemented
-     if field.interp_method == "cgrid_velocity":
-         return np.float64
-     return np.float32

@property
def size(self):
return len(self)
@@ -277,7 +266,7 @@ def _compute_neighbor_tree(self, time, dt):

self._values = np.vstack(
(
self._data["depth"],
self._data["z"],
self._data["lat"],
self._data["lon"],
)
@@ -306,7 +295,7 @@ def _neighbors_by_coor(self, coor):
def populate_indices(self):
"""Pre-populate guesses of particle ei (element id) indices"""
for i, grid in enumerate(self.fieldset.gridset):
- position = grid.search(self.depth, self.lat, self.lon)
+ position = grid.search(self.z, self.lat, self.lon)
self._data["ei"][:, i] = grid.ravel_index(
{
"X": position["X"][0],
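On the constructor side, the rename means `z=` replaces `depth=`, and the default starting position is taken from the shallowest grid level. A minimal sketch of that default logic, plus the assumed new call signature (the `ParticleSet` call is commented out because it needs a full fieldset):

```python
import numpy as np

lon = np.array([0.0, 1.0, 2.0])
grid_levels = [np.array([0.0, 10.0, 20.0])]  # toy stand-in for fieldset grids

z = None
if z is None:  # mirrors __init__: default z is the minimum over grid tops
    minz = 0
    for depth_levels in grid_levels:
        minz = min(minz, depth_levels[0])
    z = np.ones(lon.size) * minz  # all particles start at z = 0.0 here

# pset = ParticleSet(fieldset=fieldset, lon=lon, lat=lat, z=z)  # was depth= in v3
```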
10 changes: 5 additions & 5 deletions parcels/_core/statuscodes.py
@@ -40,7 +40,7 @@ class FieldInterpolationError(RuntimeError):


def _raise_field_interpolation_error(z, y, x):
- raise FieldInterpolationError(f"Field interpolation returned NaN at (depth={z}, lat={y}, lon={x})")
+ raise FieldInterpolationError(f"Field interpolation returned NaN at (z={z}, lat={y}, lon={x})")


class FieldOutOfBoundError(RuntimeError):
@@ -50,7 +50,7 @@ class FieldOutOfBoundError(RuntimeError):


def _raise_field_out_of_bound_error(z, y, x):
- raise FieldOutOfBoundError(f"Field sampled out-of-bound, at (depth={z}, lat={y}, lon={x})")
+ raise FieldOutOfBoundError(f"Field sampled out-of-bound, at (z={z}, lat={y}, lon={x})")


class FieldOutOfBoundSurfaceError(RuntimeError):
@@ -64,7 +64,7 @@ def format_out(val):
return "unknown" if val is None else val

raise FieldOutOfBoundSurfaceError(
f"Field sampled out-of-bound at the surface, at (depth={format_out(z)}, lat={format_out(y)}, lon={format_out(x)})"
f"Field sampled out-of-bound at the surface, at (z={format_out(z)}, lat={format_out(y)}, lon={format_out(x)})"
)


@@ -81,7 +81,7 @@ class GridSearchingError(RuntimeError):


def _raise_grid_searching_error(z, y, x):
- raise GridSearchingError(f"Grid searching failed at (depth={z}, lat={y}, lon={x})")
+ raise GridSearchingError(f"Grid searching failed at (z={z}, lat={y}, lon={x})")


class GeneralError(RuntimeError):
@@ -91,7 +91,7 @@ class GeneralError(RuntimeError):


def _raise_general_error(z, y, x):
- raise GeneralError(f"General error occurred at (depth={z}, lat={y}, lon={x})")
+ raise GeneralError(f"General error occurred at (z={z}, lat={y}, lon={x})")


class TimeExtrapolationError(RuntimeError):
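The message changes are cosmetic but visible to user code that catches these errors. A self-contained sketch reproducing the new wording (class and function copied from this diff; the call at the bottom is illustrative):

```python
class FieldOutOfBoundError(RuntimeError):
    """Raised when a field is sampled outside its spatial domain."""

def _raise_field_out_of_bound_error(z, y, x):
    raise FieldOutOfBoundError(f"Field sampled out-of-bound, at (z={z}, lat={y}, lon={x})")

try:
    _raise_field_out_of_bound_error(5.0, 89.9, 180.1)
except FieldOutOfBoundError as err:
    print(err)  # Field sampled out-of-bound, at (z=5.0, lat=89.9, lon=180.1)
```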
18 changes: 9 additions & 9 deletions parcels/interpolators.py
@@ -77,18 +77,18 @@ def _get_corner_data_Agrid(
ti_1 = np.clip(ti + 1, 0, data.shape[0] - 1)
ti = np.concatenate([np.repeat(ti, lenZ * 4), np.repeat(ti_1, lenZ * 4)])

- # Depth coordinates: 4 points at zi, 4 at zi+1, repeated for both time levels
+ # Z coordinates: 4 points at zi, 4 at zi+1, repeated for both time levels
if lenZ == 1:
zi = np.repeat(zi, lenT * 4)
else:
zi_1 = np.clip(zi + 1, 0, data.shape[1] - 1)
zi = np.tile(np.array([zi, zi, zi, zi, zi_1, zi_1, zi_1, zi_1]).flatten(), lenT)

- # Y coordinates: [yi, yi, yi+1, yi+1] for each spatial point, repeated for time/depth
+ # Y coordinates: [yi, yi, yi+1, yi+1] for each spatial point, repeated for time/z
yi_1 = np.clip(yi + 1, 0, data.shape[2] - 1)
yi = np.tile(np.repeat(np.column_stack([yi, yi_1]), 2), (lenT) * (lenZ))

- # X coordinates: [xi, xi+1, xi, xi+1] for each spatial point, repeated for time/depth
+ # X coordinates: [xi, xi+1, xi, xi+1] for each spatial point, repeated for time/z
xi_1 = np.clip(xi + 1, 0, data.shape[3] - 1)
xi = np.tile(np.column_stack([xi, xi_1, xi, xi_1]).flatten(), (lenT) * (lenZ))

@@ -211,17 +211,17 @@ def CGrid_Velocity(
ti_1 = np.clip(ti + 1, 0, tdim - 1)
ti_full = np.concatenate([np.repeat(ti, 4), np.repeat(ti_1, 4)])

- # Depth coordinates: 4 points at zi, repeated for both time levels
+ # Z coordinates: 4 points at zi, repeated for both time levels
zi_full = np.repeat(zi, lenT * 4)

- # Y coordinates: [yi, yi, yi+1, yi+1] for each spatial point, repeated for time/depth
+ # Y coordinates: [yi, yi, yi+1, yi+1] for each spatial point, repeated for time/z
yi_1 = np.clip(yi + 1, 0, ydim - 1)
yi_full = np.tile(np.repeat(np.column_stack([yi, yi_1]), 2), (lenT))
# # TODO check why in some cases minus needed here!!!
# yi_minus_1 = np.clip(yi - 1, 0, ydim - 1)
# yi = np.tile(np.repeat(np.column_stack([yi_minus_1, yi]), 2), (lenT))

- # X coordinates: [xi, xi+1, xi, xi+1] for each spatial point, repeated for time/depth
+ # X coordinates: [xi, xi+1, xi, xi+1] for each spatial point, repeated for time/z
xi_1 = np.clip(xi + 1, 0, xdim - 1)
xi_full = np.tile(np.column_stack([xi, xi_1, xi, xi_1]).flatten(), (lenT))

@@ -313,15 +313,15 @@ def CGrid_Velocity(
ti_1 = np.clip(ti + 1, 0, tdim - 1)
ti_full = np.concatenate([np.repeat(ti, 2), np.repeat(ti_1, 2)])

- # Depth coordinates: 1 points at zi, repeated for both time levels
+ # Z coordinates: 1 point at zi, repeated for both time levels
zi_1 = np.clip(zi + 1, 0, zdim - 1)
zi_full = np.tile(np.array([zi, zi_1]).flatten(), lenT)

- # Y coordinates: yi+1 for each spatial point, repeated for time/depth
+ # Y coordinates: yi+1 for each spatial point, repeated for time/z
yi_1 = np.clip(yi + 1, 0, ydim - 1)
yi_full = np.tile(yi_1, (lenT) * 2)

- # X coordinates: xi+1 for each spatial point, repeated for time/depth
+ # X coordinates: xi+1 for each spatial point, repeated for time/z
xi_1 = np.clip(xi + 1, 0, xdim - 1)
xi_full = np.tile(xi_1, (lenT) * 2)

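The comments above describe how one `(zi, yi, xi)` cell index is expanded to the stencil corners with `clip`/`tile`/`repeat`. The same pattern in miniature, for a single 8-corner trilinear stencil (array sizes are made up):

```python
import numpy as np

zdim, ydim, xdim = 4, 4, 4
zi, yi, xi = np.array([2]), np.array([1]), np.array([3])

# neighbour indices, clipped at the array edge exactly as the interpolators do
zi_1 = np.clip(zi + 1, 0, zdim - 1)
yi_1 = np.clip(yi + 1, 0, ydim - 1)
xi_1 = np.clip(xi + 1, 0, xdim - 1)  # xi is at the edge, so xi_1 stays 3

z_c = np.repeat(np.concatenate([zi, zi_1]), 4)              # z z z z z1 z1 z1 z1
y_c = np.tile(np.repeat(np.concatenate([yi, yi_1]), 2), 2)  # y y y1 y1 y y y1 y1
x_c = np.tile(np.concatenate([xi, xi_1]), 4)                # x x1 x x1 ...
print(list(zip(z_c, y_c, x_c)))  # the 8 corner indices of the cell
```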