From a2d71bce4f5044daca56443f8ef06e504a9e4a0d Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Thu, 17 Jul 2025 08:03:08 +0200 Subject: [PATCH 1/9] Fixing bug where conversion was not applied in field.eval --- parcels/field.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/parcels/field.py b/parcels/field.py index dba61dbd6..68d6be88e 100644 --- a/parcels/field.py +++ b/parcels/field.py @@ -262,17 +262,14 @@ def eval(self, time: datetime, z, y, x, particle=None, applyConversion=True): if np.isnan(value): # Detect Out-of-bounds sampling and raise exception _raise_field_out_of_bound_error(z, y, x) - else: - return value except (FieldSamplingError, FieldOutOfBoundError, FieldOutOfBoundSurfaceError) as e: e.add_note(f"Error interpolating field '{self.name}'.") raise e if applyConversion: - return self.units.to_target(value, z, y, x) - else: - return value + value = self.units.to_target(value, z, y, x) + return value def __getitem__(self, key): self._check_velocitysampling() @@ -355,7 +352,6 @@ def eval(self, time: datetime, z, y, x, particle=None, applyConversion=True): else: (u, v, w) = self._vector_interp_method(self, ti, position, time, z, y, x) - # print(u,v) if applyConversion: u = self.U.units.to_target(u, z, y, x) v = self.V.units.to_target(v, z, y, x) From d1a2e79ca223e0a499b6ef61225bc7e7329e2381 Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Thu, 17 Jul 2025 08:03:32 +0200 Subject: [PATCH 2/9] fixing warning in searching length-1 arrays --- parcels/xgrid.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/parcels/xgrid.py b/parcels/xgrid.py index 2cfda507e..b307469e9 100644 --- a/parcels/xgrid.py +++ b/parcels/xgrid.py @@ -395,6 +395,8 @@ def _search_1d_array( float Barycentric coordinate. 
""" + if len(arr) < 2: + return 0, 0.0 i = np.argmin(arr <= x) - 1 bcoord = (x - arr[i]) / (arr[i + 1] - arr[i]) return i, bcoord From 5b6cd83836a4a256a87e73ab0d9b6b757a30fb39 Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Mon, 21 Jul 2025 12:34:10 +0200 Subject: [PATCH 3/9] speeding up time_search by calling numpy arrays directly --- parcels/_index_search.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/parcels/_index_search.py b/parcels/_index_search.py index 4638b1848..dcb42e110 100644 --- a/parcels/_index_search.py +++ b/parcels/_index_search.py @@ -44,28 +44,28 @@ def _search_time_index(field: Field, time: datetime): if time not in field.time_interval: _raise_time_extrapolation_error(time, field=None) - time_index = field.data.time <= time + time_index = field.data.time.data <= time if time_index.all(): # If given time > last known field time, use # the last field frame without interpolation - ti = len(field.data.time) - 1 - + ti = len(field.data.time.data) - 1 elif np.logical_not(time_index).all(): # If given time < any time in the field, use # the first field frame without interpolation ti = 0 else: ti = int(time_index.argmin() - 1) if time_index.any() else 0 - if len(field.data.time) == 1: + + if len(field.data.time.data) == 1: tau = 0 - elif ti == len(field.data.time) - 1: + elif ti == len(field.data.time.data) - 1: tau = 1 else: tau = ( - (time - field.data.time[ti]).dt.total_seconds() - / (field.data.time[ti + 1] - field.data.time[ti]).dt.total_seconds() - if field.data.time[ti] != field.data.time[ti + 1] + (time - field.data.time.data[ti]) + / (field.data.time.data[ti + 1] - field.data.time.data[ti]) + if field.data.time.data[ti] != field.data.time.data[ti + 1] else 0 ) return tau, ti From 49dc841c9e74b8bdf5616351ad510b275fae3eee Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Mon, 21 Jul 2025 12:42:20 +0200 Subject: [PATCH 4/9] Also using numpy array of dataset in xgrid.search --- parcels/xgrid.py | 6 
+++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/parcels/xgrid.py b/parcels/xgrid.py index b307469e9..b530ebcf2 100644 --- a/parcels/xgrid.py +++ b/parcels/xgrid.py @@ -212,11 +212,11 @@ def _gtype(self): def search(self, z, y, x, ei=None): ds = self.xgcm_grid._ds - zi, zeta = _search_1d_array(ds.depth.values, z) + zi, zeta = _search_1d_array(ds.depth.data, z) if ds.lon.ndim == 1: - yi, eta = _search_1d_array(ds.lat.values, y) - xi, xsi = _search_1d_array(ds.lon.values, x) + yi, eta = _search_1d_array(ds.lat.data, y) + xi, xsi = _search_1d_array(ds.lon.data, x) return {"Z": (zi, zeta), "Y": (yi, eta), "X": (xi, xsi)} yi, xi = None, None From 4e58b2a39c4d7ddc0307afa4b2355a36c3760527 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 21 Jul 2025 13:42:36 +0000 Subject: [PATCH 5/9] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- parcels/_index_search.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/parcels/_index_search.py b/parcels/_index_search.py index dcb42e110..80cb8f570 100644 --- a/parcels/_index_search.py +++ b/parcels/_index_search.py @@ -63,8 +63,7 @@ def _search_time_index(field: Field, time: datetime): tau = 1 else: tau = ( - (time - field.data.time.data[ti]) - / (field.data.time.data[ti + 1] - field.data.time.data[ti]) + (time - field.data.time.data[ti]) / (field.data.time.data[ti + 1] - field.data.time.data[ti]) if field.data.time.data[ti] != field.data.time.data[ti + 1] else 0 ) From d8e6defc6705189ad5f47c1f49e54ebb0d671b05 Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Tue, 22 Jul 2025 17:18:59 +0200 Subject: [PATCH 6/9] Adding rudimentary computeTimeChunk functionality in particleset.execute --- parcels/field.py | 18 +++++++++--------- parcels/particleset.py | 10 ++++++++-- 2 files changed, 17 insertions(+), 11 deletions(-) diff --git a/parcels/field.py b/parcels/field.py index 
68d6be88e..63a680e52 100644 --- a/parcels/field.py +++ b/parcels/field.py @@ -147,7 +147,7 @@ def __init__( _assert_compatible_combination(data, grid) self.name = name - self.data = data + self.data_full = data self.grid = grid try: @@ -186,7 +186,7 @@ def __init__( else: raise ValueError("Unsupported mesh type in data array attributes. Choose either: 'spherical' or 'flat'") - if "time" not in self.data.dims: + if "time" not in data.dims: raise ValueError("Field is missing a 'time' dimension. ") @property @@ -201,27 +201,27 @@ def units(self, value): @property def xdim(self): - if type(self.data) is xr.DataArray: + if type(self.data_full) is xr.DataArray: return self.grid.xdim else: raise NotImplementedError("xdim not implemented for unstructured grids") @property def ydim(self): - if type(self.data) is xr.DataArray: + if type(self.data_full) is xr.DataArray: return self.grid.ydim else: raise NotImplementedError("ydim not implemented for unstructured grids") @property def zdim(self): - if type(self.data) is xr.DataArray: + if type(self.data_full) is xr.DataArray: return self.grid.zdim else: - if "nz1" in self.data.dims: - return self.data.sizes["nz1"] - elif "nz" in self.data.dims: - return self.data.sizes["nz"] + if "nz1" in self.data_full.dims: + return self.data_full.sizes["nz1"] + elif "nz" in self.data_full.dims: + return self.data_full.sizes["nz"] else: return 0 diff --git a/parcels/particleset.py b/parcels/particleset.py index f018a356d..79c9ad347 100644 --- a/parcels/particleset.py +++ b/parcels/particleset.py @@ -805,10 +805,16 @@ def execute( time = start_time while sign_dt * (time - end_time) < 0: + + for fld in [self.fieldset.U, self.fieldset.V]: # TODO generalise to all fields + ti = np.argmin(fld.data_full.time.data <= self._data["time_nextloop"][0]) - 1 # TODO also implement dt < 0 + if not hasattr(fld, "data") or fld.data_full.time.data[ti] != fld.data.time.data[0]: + fld.data = fld.data_full.isel({"time": slice(ti, ti + 2)}).load() + if sign_dt > 
0: - next_time = end_time # TODO update to min(next_output, end_time) when ParticleFile works + next_time = min(time + dt, end_time) else: - next_time = end_time # TODO update to max(next_output, end_time) when ParticleFile works + next_time = max(time - dt, end_time) res = self._kernel.execute(self, endtime=next_time, dt=dt) if res == StatusCode.StopAllExecution: return StatusCode.StopAllExecution From e77866f9ba6e4b55927d58dc003e938e6cdd2516 Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Wed, 23 Jul 2025 09:47:00 +0200 Subject: [PATCH 7/9] cleaning up _load_timesteps --- parcels/field.py | 13 +++++++++++++ parcels/fieldset.py | 7 +++++++ parcels/particleset.py | 7 ++----- 3 files changed, 22 insertions(+), 5 deletions(-) diff --git a/parcels/field.py b/parcels/field.py index 63a680e52..4a423b42b 100644 --- a/parcels/field.py +++ b/parcels/field.py @@ -242,6 +242,19 @@ def _check_velocitysampling(self): stacklevel=2, ) + def _load_timesteps(self, time): + """Load the appropriate timesteps of a field.""" + ti = np.argmin(self.data_full.time.data <= time) - 1 # TODO also implement dt < 0 + if not hasattr(self, "data"): + self.data = self.data_full.isel({"time": slice(ti, ti + 2)}).load() + elif self.data_full.time.data[ti] == self.data.time.data[1]: + self.data = xr.concat([self.data[1, :], self.data_full.isel({"time": ti + 1}).load()], dim="time") + elif self.data_full.time.data[ti] != self.data.time.data[0]: + self.data = self.data_full.isel({"time": slice(ti, ti + 2)}).load() + assert ( + len(self.data.time) == 2 + ), f"Field {self.name} has not been loaded correctly. Expected 2 timesteps, but got {len(self.data.time)}." + def eval(self, time: datetime, z, y, x, particle=None, applyConversion=True): """Interpolate field values in space and time. 
diff --git a/parcels/fieldset.py b/parcels/fieldset.py index c423000fc..c67d0bd8c 100644 --- a/parcels/fieldset.py +++ b/parcels/fieldset.py @@ -82,6 +82,13 @@ def time_interval(self): return None return functools.reduce(lambda x, y: x.intersection(y), time_intervals) + def _load_timesteps(self, time): + """Load the appropriate timesteps of all fields in the fieldset.""" + for fldname in self.fields: + field = self.fields[fldname] + if isinstance(field, Field): + field._load_timesteps(time) + def add_field(self, field: Field, name: str | None = None): """Add a :class:`parcels.field.Field` object to the FieldSet. diff --git a/parcels/particleset.py b/parcels/particleset.py index 79c9ad347..1e235f0e5 100644 --- a/parcels/particleset.py +++ b/parcels/particleset.py @@ -805,11 +805,8 @@ def execute( time = start_time while sign_dt * (time - end_time) < 0: - - for fld in [self.fieldset.U, self.fieldset.V]: # TODO generalise to all fields - ti = np.argmin(fld.data_full.time.data <= self._data["time_nextloop"][0]) - 1 # TODO also implement dt < 0 - if not hasattr(fld, "data") or fld.data_full.time.data[ti] != fld.data.time.data[0]: - fld.data = fld.data_full.isel({"time": slice(ti, ti + 2)}).load() + # Load the appropriate timesteps of the fieldset + self.fieldset._load_timesteps(self._data["time_nextloop"][0]) if sign_dt > 0: next_time = min(time + dt, end_time) From 5a7d7d2cb017cbe44e7bd454269b6519e6507c9e Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Wed, 23 Jul 2025 09:47:56 +0200 Subject: [PATCH 8/9] adding checks to index_search to validate bcoord between 0 and 1 --- parcels/_index_search.py | 2 ++ parcels/xgrid.py | 2 ++ 2 files changed, 4 insertions(+) diff --git a/parcels/_index_search.py b/parcels/_index_search.py index 80cb8f570..4d470488e 100644 --- a/parcels/_index_search.py +++ b/parcels/_index_search.py @@ -67,6 +67,8 @@ def _search_time_index(field: Field, time: datetime): if field.data.time.data[ti] != field.data.time.data[ti + 1] else 0 ) + if 
tau < 0 or tau > 1: # TODO only for debugging; test can go? + raise ValueError(f"Time {time} is out of bounds for field time data {field.data.time.data}.") return tau, ti diff --git a/parcels/xgrid.py b/parcels/xgrid.py index b530ebcf2..6ee905906 100644 --- a/parcels/xgrid.py +++ b/parcels/xgrid.py @@ -399,6 +399,8 @@ def _search_1d_array( return 0, 0.0 i = np.argmin(arr <= x) - 1 bcoord = (x - arr[i]) / (arr[i + 1] - arr[i]) + if bcoord < 0 or bcoord > 1: # TODO only for debugging; test can go? + raise ValueError(f"Position {x} is out of bounds for array {arr}.") return i, bcoord From 921909979bfe81c6c3fd57266ee36ab213ce1418 Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Thu, 24 Jul 2025 14:16:51 +0200 Subject: [PATCH 9/9] Fixing merge bug --- parcels/field.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/parcels/field.py b/parcels/field.py index 0f2c43053..4fbe5a815 100644 --- a/parcels/field.py +++ b/parcels/field.py @@ -189,7 +189,7 @@ def __init__( else: raise ValueError("Unsupported mesh type in data array attributes. Choose either: 'spherical' or 'flat'") - if self.data.shape[0] > 1: + if data.shape[0] > 1: if "time" not in data.coords: raise ValueError("Field data is missing a 'time' coordinate.")