Skip to content

Commit 3a05d61

Browse files
Merge pull request #2303 from OceanParcels/API_change_pset_depth_to_z
Changing pset.depth to pset.z throughout code
2 parents 37cbace + 902a79e commit 3a05d61

17 files changed

+119
-151
lines changed

docs/community/v4-migration.md

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,7 @@ Version 4 of Parcels is unreleased at the moment. The information in this migrat
1414
- The `time` argument in the Kernel signature has been removed in the Kernel API, so can't be used. Use `particle.time` instead.
1515
- The `particle` argument in the Kernel signature has been renamed to `particles`.
1616
- `math` functions should be replaced with array compatible equivalents (e.g., `math.sin` -> `np.sin`). Instead of `ParcelsRandom` you should use numpy's random functions.
17+
- `particle.depth` has been changed to `particles.z` to be consistent with the [CF conventions for trajectory data](https://cfconventions.org/cf-conventions/cf-conventions.html#trajectory-data), and to make Parcels also generalizable to atmospheric contexts.
1718

1819
## FieldSet
1920

parcels/_core/field.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -233,7 +233,7 @@ def __getitem__(self, key):
233233
self._check_velocitysampling()
234234
try:
235235
if isinstance(key, KernelParticle):
236-
return self.eval(key.time, key.depth, key.lat, key.lon, key)
236+
return self.eval(key.time, key.z, key.lat, key.lon, key)
237237
else:
238238
return self.eval(*key)
239239
except tuple(AllParcelsErrorCodes.keys()) as error:
@@ -334,7 +334,7 @@ def eval(self, time: datetime, z, y, x, particles=None, applyConversion=True):
334334
def __getitem__(self, key):
335335
try:
336336
if isinstance(key, KernelParticle):
337-
return self.eval(key.time, key.depth, key.lat, key.lon, key)
337+
return self.eval(key.time, key.z, key.lat, key.lon, key)
338338
else:
339339
return self.eval(*key)
340340
except tuple(AllParcelsErrorCodes.keys()) as error:

parcels/_core/fieldset.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -240,7 +240,7 @@ def from_copernicusmarine(ds: xr.Dataset):
240240
if "W" in ds.data_vars:
241241
ds["W"] -= ds[
242242
"W"
243-
] # Negate W to convert from up positive to down positive (as that's the direction of positive depth)
243+
] # Negate W to convert from up positive to down positive (as that's the direction of positive z)
244244
fields["W"] = Field("W", ds["W"], grid)
245245
fields["UVW"] = VectorField("UVW", fields["U"], fields["V"], fields["W"])
246246
else:

parcels/_core/kernel.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -120,11 +120,11 @@ def Setcoords(particles, fieldset): # pragma: no cover
120120

121121
particles.lon += particles.dlon
122122
particles.lat += particles.dlat
123-
particles.depth += particles.ddepth
123+
particles.z += particles.dz
124124

125125
particles.dlon = 0
126126
particles.dlat = 0
127-
particles.ddepth = 0
127+
particles.dz = 0
128128

129129
particles.time = particles.time_nextloop
130130

@@ -286,6 +286,6 @@ def execute(self, pset, endtime, dt):
286286
if error_code == StatusCode.ErrorTimeExtrapolation:
287287
error_func(pset[inds].time)
288288
else:
289-
error_func(pset[inds].depth, pset[inds].lat, pset[inds].lon)
289+
error_func(pset[inds].z, pset[inds].lat, pset[inds].lon)
290290

291291
return pset

parcels/_core/particle.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -176,13 +176,13 @@ def get_default_particle(spatial_dtype: np.float32 | np.float64) -> ParticleClas
176176
attrs={"standard_name": "latitude", "units": "degrees_north", "axis": "Y"},
177177
),
178178
Variable(
179-
"depth",
179+
"z",
180180
dtype=spatial_dtype,
181-
attrs={"standard_name": "depth", "units": "m", "positive": "down"},
181+
attrs={"standard_name": "vertical coordinate", "units": "m", "positive": "down"},
182182
),
183183
Variable("dlon", dtype=spatial_dtype, to_write=False),
184184
Variable("dlat", dtype=spatial_dtype, to_write=False),
185-
Variable("ddepth", dtype=spatial_dtype, to_write=False),
185+
Variable("dz", dtype=spatial_dtype, to_write=False),
186186
Variable(
187187
"time",
188188
dtype=_SAME_AS_FIELDSET_TIME_INTERVAL.VALUE,

parcels/_core/particlefile.py

Lines changed: 10 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -126,12 +126,6 @@ def store(self):
126126
def create_new_zarrfile(self):
127127
return self._create_new_zarrfile
128128

129-
def _convert_varout_name(self, var):
130-
if var == "depth":
131-
return "z"
132-
else:
133-
return var
134-
135129
def _extend_zarr_dims(self, Z, store, dtype, axis):
136130
if axis == 1:
137131
a = np.full((Z.shape[0], self.chunks[1]), _DATATYPES_TO_FILL_VALUES[dtype], dtype=dtype)
@@ -214,8 +208,7 @@ def _write_particle_data(self, *, particle_data, pclass, time_interval, time, in
214208
obs = np.zeros((self._maxids), dtype=np.int32)
215209
for var in vars_to_write:
216210
dtype = _maybe_convert_time_dtype(var.dtype)
217-
varout = self._convert_varout_name(var.name)
218-
if varout not in ["trajectory"]: # because 'trajectory' is written as coordinate
211+
if var.name not in ["trajectory"]: # because 'trajectory' is written as coordinate
219212
if var.to_write == "once":
220213
data = np.full(
221214
(arrsize[0],),
@@ -228,25 +221,24 @@ def _write_particle_data(self, *, particle_data, pclass, time_interval, time, in
228221
data = np.full(arrsize, _DATATYPES_TO_FILL_VALUES[dtype], dtype=dtype)
229222
data[ids, 0] = particle_data[var.name][indices_to_write]
230223
dims = ["trajectory", "obs"]
231-
ds[varout] = xr.DataArray(data=data, dims=dims, attrs=attrs[var.name])
232-
ds[varout].encoding["chunks"] = self.chunks[0] if var.to_write == "once" else self.chunks # type: ignore[index]
224+
ds[var.name] = xr.DataArray(data=data, dims=dims, attrs=attrs[var.name])
225+
ds[var.name].encoding["chunks"] = self.chunks[0] if var.to_write == "once" else self.chunks # type: ignore[index]
233226
ds.to_zarr(store, mode="w")
234227
self._create_new_zarrfile = False
235228
else:
236229
Z = zarr.group(store=store, overwrite=False)
237230
obs = particle_data["obs_written"][indices_to_write]
238231
for var in vars_to_write:
239232
dtype = _maybe_convert_time_dtype(var.dtype)
240-
varout = self._convert_varout_name(var.name)
241-
if self._maxids > Z[varout].shape[0]:
242-
self._extend_zarr_dims(Z[varout], store, dtype=dtype, axis=0)
233+
if self._maxids > Z[var.name].shape[0]:
234+
self._extend_zarr_dims(Z[var.name], store, dtype=dtype, axis=0)
243235
if var.to_write == "once":
244236
if len(once_ids) > 0:
245-
Z[varout].vindex[ids_once] = particle_data[var.name][indices_to_write_once]
237+
Z[var.name].vindex[ids_once] = particle_data[var.name][indices_to_write_once]
246238
else:
247-
if max(obs) >= Z[varout].shape[1]: # type: ignore[type-var]
248-
self._extend_zarr_dims(Z[varout], store, dtype=dtype, axis=1)
249-
Z[varout].vindex[ids, obs] = particle_data[var.name][indices_to_write]
239+
if max(obs) >= Z[var.name].shape[1]: # type: ignore[type-var]
240+
self._extend_zarr_dims(Z[var.name], store, dtype=dtype, axis=1)
241+
Z[var.name].vindex[ids, obs] = particle_data[var.name][indices_to_write]
250242

251243
particle_data["obs_written"][indices_to_write] = obs + 1
252244

@@ -262,7 +254,7 @@ def write_latest_locations(self, pset, time):
262254
time :
263255
Time at which to write ParticleSet. Note that typically this would be pset.time_nextloop
264256
"""
265-
for var in ["lon", "lat", "depth"]:
257+
for var in ["lon", "lat", "z"]:
266258
pset._data[f"{var}"] += pset._data[f"d{var}"]
267259
pset._data["time"] = pset._data["time_nextloop"]
268260
self.write(pset, time)

parcels/_core/particleset.py

Lines changed: 14 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -39,16 +39,12 @@ class ParticleSet:
3939
List of initial longitude values for particles
4040
lat :
4141
List of initial latitude values for particles
42-
depth :
43-
Optional list of initial depth values for particles. Default is 0m
42+
z :
43+
Optional list of initial z values for particles. Default is 0m
4444
time :
4545
Optional list of initial time values for particles. Default is fieldset.U.grid.time[0]
4646
repeatdt : datetime.timedelta or float, optional
4747
Optional interval on which to repeat the release of the ParticleSet. Either timedelta object, or float in seconds.
48-
lonlatdepth_dtype :
49-
Floating precision for lon, lat, depth particle coordinates.
50-
It is either np.float32 or np.float64. Default is np.float32 if fieldset.U.interp_method is 'linear'
51-
and np.float64 if the interpolation method is 'cgrid_velocity'
5248
trajectory_ids :
5349
Optional list of "trajectory" values (integers) for the particle IDs
5450
partition_function :
@@ -66,7 +62,7 @@ def __init__(
6662
pclass=Particle,
6763
lon=None,
6864
lat=None,
69-
depth=None,
65+
z=None,
7066
time=None,
7167
trajectory_ids=None,
7268
**kwargs,
@@ -84,15 +80,15 @@ def __init__(
8480
if trajectory_ids is None:
8581
trajectory_ids = np.arange(lon.size)
8682

87-
if depth is None:
88-
mindepth = 0
83+
if z is None:
84+
minz = 0
8985
for field in self.fieldset.fields.values():
9086
if field.grid.depth is not None:
91-
mindepth = min(mindepth, field.grid.depth[0])
92-
depth = np.ones(lon.size) * mindepth
87+
minz = min(minz, field.grid.depth[0])
88+
z = np.ones(lon.size) * minz
9389
else:
94-
depth = _convert_to_flat_array(depth)
95-
assert lon.size == lat.size and lon.size == depth.size, "lon, lat, depth don't all have the same lenghts"
90+
z = _convert_to_flat_array(z)
91+
assert lon.size == lat.size and lon.size == z.size, "lon, lat, z don't all have the same lengths"
9692

9793
if time is None or len(time) == 0:
9894
# do not set a time yet (because sign_dt not known)
@@ -106,7 +102,7 @@ def __init__(
106102
raise TypeError("particle time must be a datetime, timedelta, or date object")
107103
time = np.repeat(time, lon.size) if time.size == 1 else time
108104

109-
assert lon.size == time.size, "time and positions (lon, lat, depth) do not have the same lengths."
105+
assert lon.size == time.size, "time and positions (lon, lat, z) do not have the same lengths."
110106

111107
if fieldset.time_interval:
112108
_warn_particle_times_outside_fieldset_time_bounds(time, fieldset.time_interval)
@@ -115,7 +111,7 @@ def __init__(
115111
if kwvar not in ["partition_function"]:
116112
kwargs[kwvar] = _convert_to_flat_array(kwargs[kwvar])
117113
assert lon.size == kwargs[kwvar].size, (
118-
f"{kwvar} and positions (lon, lat, depth) don't have the same lengths."
114+
f"{kwvar} and positions (lon, lat, z) don't have the same lengths."
119115
)
120116

121117
self._data = create_particle_data(
@@ -126,7 +122,7 @@ def __init__(
126122
initial=dict(
127123
lon=lon,
128124
lat=lat,
129-
depth=depth,
125+
z=z,
130126
time=time,
131127
time_nextloop=time,
132128
trajectory=trajectory_ids,
@@ -182,13 +178,6 @@ def __setattr__(self, name, value):
182178
else:
183179
object.__setattr__(self, name, value)
184180

185-
@staticmethod
186-
def lonlatdepth_dtype_from_field_interp_method(field):
187-
# TODO update this when now interp methods are implemented
188-
if field.interp_method == "cgrid_velocity":
189-
return np.float64
190-
return np.float32
191-
192181
@property
193182
def size(self):
194183
return len(self)
@@ -277,7 +266,7 @@ def _compute_neighbor_tree(self, time, dt):
277266

278267
self._values = np.vstack(
279268
(
280-
self._data["depth"],
269+
self._data["z"],
281270
self._data["lat"],
282271
self._data["lon"],
283272
)
@@ -306,7 +295,7 @@ def _neighbors_by_coor(self, coor):
306295
def populate_indices(self):
307296
"""Pre-populate guesses of particle ei (element id) indices"""
308297
for i, grid in enumerate(self.fieldset.gridset):
309-
position = grid.search(self.depth, self.lat, self.lon)
298+
position = grid.search(self.z, self.lat, self.lon)
310299
self._data["ei"][:, i] = grid.ravel_index(
311300
{
312301
"X": position["X"][0],

parcels/_core/statuscodes.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -40,7 +40,7 @@ class FieldInterpolationError(RuntimeError):
4040

4141

4242
def _raise_field_interpolation_error(z, y, x):
43-
raise FieldInterpolationError(f"Field interpolation returned NaN at (depth={z}, lat={y}, lon={x})")
43+
raise FieldInterpolationError(f"Field interpolation returned NaN at (z={z}, lat={y}, lon={x})")
4444

4545

4646
class FieldOutOfBoundError(RuntimeError):
@@ -50,7 +50,7 @@ class FieldOutOfBoundError(RuntimeError):
5050

5151

5252
def _raise_field_out_of_bound_error(z, y, x):
53-
raise FieldOutOfBoundError(f"Field sampled out-of-bound, at (depth={z}, lat={y}, lon={x})")
53+
raise FieldOutOfBoundError(f"Field sampled out-of-bound, at (z={z}, lat={y}, lon={x})")
5454

5555

5656
class FieldOutOfBoundSurfaceError(RuntimeError):
@@ -64,7 +64,7 @@ def format_out(val):
6464
return "unknown" if val is None else val
6565

6666
raise FieldOutOfBoundSurfaceError(
67-
f"Field sampled out-of-bound at the surface, at (depth={format_out(z)}, lat={format_out(y)}, lon={format_out(x)})"
67+
f"Field sampled out-of-bound at the surface, at (z={format_out(z)}, lat={format_out(y)}, lon={format_out(x)})"
6868
)
6969

7070

@@ -81,7 +81,7 @@ class GridSearchingError(RuntimeError):
8181

8282

8383
def _raise_grid_searching_error(z, y, x):
84-
raise GridSearchingError(f"Grid searching failed at (depth={z}, lat={y}, lon={x})")
84+
raise GridSearchingError(f"Grid searching failed at (z={z}, lat={y}, lon={x})")
8585

8686

8787
class GeneralError(RuntimeError):
@@ -91,7 +91,7 @@ class GeneralError(RuntimeError):
9191

9292

9393
def _raise_general_error(z, y, x):
94-
raise GeneralError(f"General error occurred at (depth={z}, lat={y}, lon={x})")
94+
raise GeneralError(f"General error occurred at (z={z}, lat={y}, lon={x})")
9595

9696

9797
class TimeExtrapolationError(RuntimeError):

parcels/interpolators.py

Lines changed: 9 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -77,18 +77,18 @@ def _get_corner_data_Agrid(
7777
ti_1 = np.clip(ti + 1, 0, data.shape[0] - 1)
7878
ti = np.concatenate([np.repeat(ti, lenZ * 4), np.repeat(ti_1, lenZ * 4)])
7979

80-
# Depth coordinates: 4 points at zi, 4 at zi+1, repeated for both time levels
80+
# Z coordinates: 4 points at zi, 4 at zi+1, repeated for both time levels
8181
if lenZ == 1:
8282
zi = np.repeat(zi, lenT * 4)
8383
else:
8484
zi_1 = np.clip(zi + 1, 0, data.shape[1] - 1)
8585
zi = np.tile(np.array([zi, zi, zi, zi, zi_1, zi_1, zi_1, zi_1]).flatten(), lenT)
8686

87-
# Y coordinates: [yi, yi, yi+1, yi+1] for each spatial point, repeated for time/depth
87+
# Y coordinates: [yi, yi, yi+1, yi+1] for each spatial point, repeated for time/z
8888
yi_1 = np.clip(yi + 1, 0, data.shape[2] - 1)
8989
yi = np.tile(np.repeat(np.column_stack([yi, yi_1]), 2), (lenT) * (lenZ))
9090

91-
# X coordinates: [xi, xi+1, xi, xi+1] for each spatial point, repeated for time/depth
91+
# X coordinates: [xi, xi+1, xi, xi+1] for each spatial point, repeated for time/z
9292
xi_1 = np.clip(xi + 1, 0, data.shape[3] - 1)
9393
xi = np.tile(np.column_stack([xi, xi_1, xi, xi_1]).flatten(), (lenT) * (lenZ))
9494

@@ -211,17 +211,17 @@ def CGrid_Velocity(
211211
ti_1 = np.clip(ti + 1, 0, tdim - 1)
212212
ti_full = np.concatenate([np.repeat(ti, 4), np.repeat(ti_1, 4)])
213213

214-
# Depth coordinates: 4 points at zi, repeated for both time levels
214+
# Z coordinates: 4 points at zi, repeated for both time levels
215215
zi_full = np.repeat(zi, lenT * 4)
216216

217-
# Y coordinates: [yi, yi, yi+1, yi+1] for each spatial point, repeated for time/depth
217+
# Y coordinates: [yi, yi, yi+1, yi+1] for each spatial point, repeated for time/z
218218
yi_1 = np.clip(yi + 1, 0, ydim - 1)
219219
yi_full = np.tile(np.repeat(np.column_stack([yi, yi_1]), 2), (lenT))
220220
# # TODO check why in some cases minus needed here!!!
221221
# yi_minus_1 = np.clip(yi - 1, 0, ydim - 1)
222222
# yi = np.tile(np.repeat(np.column_stack([yi_minus_1, yi]), 2), (lenT))
223223

224-
# X coordinates: [xi, xi+1, xi, xi+1] for each spatial point, repeated for time/depth
224+
# X coordinates: [xi, xi+1, xi, xi+1] for each spatial point, repeated for time/z
225225
xi_1 = np.clip(xi + 1, 0, xdim - 1)
226226
xi_full = np.tile(np.column_stack([xi, xi_1, xi, xi_1]).flatten(), (lenT))
227227

@@ -313,15 +313,15 @@ def CGrid_Velocity(
313313
ti_1 = np.clip(ti + 1, 0, tdim - 1)
314314
ti_full = np.concatenate([np.repeat(ti, 2), np.repeat(ti_1, 2)])
315315

316-
# Depth coordinates: 1 points at zi, repeated for both time levels
316+
# Z coordinates: 1 points at zi, repeated for both time levels
317317
zi_1 = np.clip(zi + 1, 0, zdim - 1)
318318
zi_full = np.tile(np.array([zi, zi_1]).flatten(), lenT)
319319

320-
# Y coordinates: yi+1 for each spatial point, repeated for time/depth
320+
# Y coordinates: yi+1 for each spatial point, repeated for time/z
321321
yi_1 = np.clip(yi + 1, 0, ydim - 1)
322322
yi_full = np.tile(yi_1, (lenT) * 2)
323323

324-
# X coordinates: xi+1 for each spatial point, repeated for time/depth
324+
# X coordinates: xi+1 for each spatial point, repeated for time/z
325325
xi_1 = np.clip(xi + 1, 0, xdim - 1)
326326
xi_full = np.tile(xi_1, (lenT) * 2)
327327

0 commit comments

Comments
 (0)