From 79a2d8dcdb28225a51d97b9735654ca986133099 Mon Sep 17 00:00:00 2001
From: Erik van Sebille
Date: Sat, 13 Sep 2025 15:36:59 +0200
Subject: [PATCH] Some renaming of particle to particles

To stay consistent with the change in the Kernel API, where we also
changed from `particle` to `particles`
---
 parcels/basegrid.py          |  4 +--
 parcels/field.py             | 48 ++++++++++++++++++------------------
 parcels/particleset.py       |  6 ++---
 parcels/tools/statuscodes.py |  6 ++---
 parcels/xgrid.py             |  2 +-
 5 files changed, 33 insertions(+), 33 deletions(-)

diff --git a/parcels/basegrid.py b/parcels/basegrid.py
index 2765dd59e..b53f51314 100644
--- a/parcels/basegrid.py
+++ b/parcels/basegrid.py
@@ -54,9 +54,9 @@ def search(self, z: float, y: float, x: float, ei=None) -> dict[str, tuple[int,
         - Unstructured grid: {"Z": (zi, zeta), "FACE": (fi, bcoords)}

         Where:
-        - index (int): The cell position of a particle along the given axis
+        - index (int): The cell position of the particles along the given axis
         - barycentric_coordinates (float or np.ndarray): The coordinates defining
-          a particle's position within the grid cell. For structured grids, this
+          the particles' positions within the grid cell. For structured grids, this
           is a single coordinate per axis; for unstructured grids, this can be an
           array of coordinates for the face polygon.

diff --git a/parcels/field.py b/parcels/field.py
index 4d612ea15..fff0796b5 100644
--- a/parcels/field.py
+++ b/parcels/field.py
@@ -41,7 +41,7 @@ def _deal_with_errors(error, key, vector_type: VectorType):
     elif isinstance(key[-1], KernelParticle):
         key[-1].state = AllParcelsErrorCodes[type(error)]
     else:
-        raise RuntimeError(f"{error}. Error could not be handled because particle was not part of the Field Sampling.")
+        raise RuntimeError(f"{error}. Error could not be handled because particles were not part of the Field Sampling.")

     if vector_type and "3D" in vector_type:
         return (0, 0, 0)
@@ -205,7 +205,7 @@ def _check_velocitysampling(self):
             stacklevel=2,
         )

-    def eval(self, time: datetime, z, y, x, particle=None, applyConversion=True):
+    def eval(self, time: datetime, z, y, x, particles=None, applyConversion=True):
         """Interpolate field values in space and time.

         We interpolate linearly in time and apply implicit unit
@@ -219,11 +219,11 @@ def eval(self, time: datetime, z, y, x, particle=None, applyConversion=True):
         tau, ti = _search_time_index(self, time)
         position = self.grid.search(z, y, x, ei=_ei)

-        _update_particle_states_position(particle, position)
+        _update_particle_states_position(particles, position)

         value = self._interp_method(self, ti, position, tau, time, z, y, x)

-        _update_particle_states_interp_value(particle, value)
+        _update_particle_states_interp_value(particles, value)

         if applyConversion:
             value = self.units.to_target(value, z, y, x)
@@ -287,7 +287,7 @@ def vector_interp_method(self, method: Callable):
         _assert_same_function_signature(method, ref=ZeroInterpolator_Vector, context="Interpolation")
         self._vector_interp_method = method

-    def eval(self, time: datetime, z, y, x, particle=None, applyConversion=True):
+    def eval(self, time: datetime, z, y, x, particles=None, applyConversion=True):
         """Interpolate field values in space and time.

         We interpolate linearly in time and apply implicit unit
@@ -301,7 +301,7 @@ def eval(self, time: datetime, z, y, x, particle=None, applyConversion=True):
         tau, ti = _search_time_index(self.U, time)
         position = self.grid.search(z, y, x, ei=_ei)

-        _update_particle_states_position(particle, position)
+        _update_particle_states_position(particles, position)

         if self._vector_interp_method is None:
             u = self.U._interp_method(self.U, ti, position, tau, time, z, y, x)
@@ -319,7 +319,7 @@ def eval(self, time: datetime, z, y, x, particle=None, applyConversion=True):
             (u, v, w) = self._vector_interp_method(self, ti, position, tau, time, z, y, x, applyConversion)

         for vel in (u, v, w):
-            _update_particle_states_interp_value(particle, vel)
+            _update_particle_states_interp_value(particles, vel)

         if applyConversion and ("3D" in self.vector_type):
             w = self.W.units.to_target(w, z, y, x) if self.W else 0.0
@@ -339,34 +339,34 @@ def __getitem__(self, key):
             return _deal_with_errors(error, key, vector_type=self.vector_type)


-def _update_particle_states_position(particle, position):
+def _update_particle_states_position(particles, position):
     """Update the particle states based on the position dictionary."""
-    if particle:  # TODO also support uxgrid search
+    if particles:  # TODO also support uxgrid search
         for dim in ["X", "Y"]:
             if dim in position:
-                particle.state = np.maximum(
-                    np.where(position[dim][0] == -1, StatusCode.ErrorOutOfBounds, particle.state), particle.state
+                particles.state = np.maximum(
+                    np.where(position[dim][0] == -1, StatusCode.ErrorOutOfBounds, particles.state), particles.state
                 )
-                particle.state = np.maximum(
-                    np.where(position[dim][0] == GRID_SEARCH_ERROR, StatusCode.ErrorGridSearching, particle.state),
-                    particle.state,
+                particles.state = np.maximum(
+                    np.where(position[dim][0] == GRID_SEARCH_ERROR, StatusCode.ErrorGridSearching, particles.state),
+                    particles.state,
                 )
         if "Z" in position:
-            particle.state = np.maximum(
-                np.where(position["Z"][0] == RIGHT_OUT_OF_BOUNDS, StatusCode.ErrorOutOfBounds, particle.state),
-                particle.state,
+            particles.state = np.maximum(
+                np.where(position["Z"][0] == RIGHT_OUT_OF_BOUNDS, StatusCode.ErrorOutOfBounds, particles.state),
+                particles.state,
             )
-            particle.state = np.maximum(
-                np.where(position["Z"][0] == LEFT_OUT_OF_BOUNDS, StatusCode.ErrorThroughSurface, particle.state),
-                particle.state,
+            particles.state = np.maximum(
+                np.where(position["Z"][0] == LEFT_OUT_OF_BOUNDS, StatusCode.ErrorThroughSurface, particles.state),
+                particles.state,
             )


-def _update_particle_states_interp_value(particle, value):
+def _update_particle_states_interp_value(particles, value):
     """Update the particle states based on the interpolated value, but only if state is not an Error already."""
-    if particle:
-        particle.state = np.maximum(
-            np.where(np.isnan(value), StatusCode.ErrorInterpolation, particle.state), particle.state
+    if particles:
+        particles.state = np.maximum(
+            np.where(np.isnan(value), StatusCode.ErrorInterpolation, particles.state), particles.state
         )
diff --git a/parcels/particleset.py b/parcels/particleset.py
index b44cef0be..98f4f9dc3 100644
--- a/parcels/particleset.py
+++ b/parcels/particleset.py
@@ -24,12 +24,12 @@ class ParticleSet:
-    """Class for storing particle and executing kernel over them.
+    """Class for storing particles and executing kernels over them.

     Please note that this currently only supports fixed size particle sets, meaning that
     the particle set only holds the particles defined on construction.
     Individual particles can neither be added nor deleted individually,
-    and individual particles can only be deleted as a set procedurally (i.e. by 'particle.delete()'-call during
-    kernel execution).
+    and individual particles can only be deleted as a set procedurally (i.e. by changing their state to 'StatusCode.Delete'
+    during kernel execution).

     Parameters
     ----------
diff --git a/parcels/tools/statuscodes.py b/parcels/tools/statuscodes.py
index aa84b3ef4..9b41ba033 100644
--- a/parcels/tools/statuscodes.py
+++ b/parcels/tools/statuscodes.py
@@ -110,10 +110,10 @@ def _raise_time_extrapolation_error(time: float, field=None):


 class KernelError(RuntimeError):
-    """General particle kernel error with optional custom message."""
+    """General kernel error for particles, with optional custom message."""

-    def __init__(self, particle, fieldset=None, msg=None):
-        message = f"{particle.state}\nParticle {particle}\nTime: {particle.time}\ntimestep dt: {particle.dt}\n"
+    def __init__(self, particles, fieldset=None, msg=None):
+        message = f"{particles.state}\nParticle {particles}\nTime: {particles.time}\ntimestep dt: {particles.dt}\n"
         if msg:
             message += msg
         super().__init__(message)
diff --git a/parcels/xgrid.py b/parcels/xgrid.py
index 0ab5feec1..3ac898be4 100644
--- a/parcels/xgrid.py
+++ b/parcels/xgrid.py
@@ -508,7 +508,7 @@ def _search_1d_array(
     x: float,
 ) -> tuple[int, int]:
     """
-    Searches for the particle location in a 1D array and returns barycentric coordinate along dimension.
+    Searches for particle locations in a 1D array and returns the barycentric coordinate along the dimension.

     Assumptions:
     - array is strictly monotonically increasing.
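
The renamed helpers operate on whole arrays of particles rather than a single particle:
`_update_particle_states_position` escalates each particle's status with `np.where`/`np.maximum`,
so a state that already carries a higher error code is never overwritten by a milder one.
Below is a minimal standalone sketch of that pattern; the `StatusCode` values, the
`GRID_SEARCH_ERROR` sentinel, and the shape of the `position` dict are illustrative
assumptions, not Parcels' actual definitions.

    import numpy as np

    # Illustrative stand-ins, ordered by severity; not Parcels' actual constants.
    class StatusCode:
        Success = 0
        ErrorInterpolation = 30
        ErrorOutOfBounds = 40
        ErrorGridSearching = 50

    GRID_SEARCH_ERROR = -2  # assumed sentinel for a failed horizontal grid search

    def update_states_position(states, position):
        """Escalate per-particle states from horizontal search results,
        never downgrading a state that is already a higher error code."""
        for dim in ("X", "Y"):
            if dim in position:
                idx = position[dim][0]
                states = np.maximum(np.where(idx == -1, StatusCode.ErrorOutOfBounds, states), states)
                states = np.maximum(np.where(idx == GRID_SEARCH_ERROR, StatusCode.ErrorGridSearching, states), states)
        return states

    # Example: three particles, the second one falls outside the grid in X.
    states = np.zeros(3, dtype=int)
    position = {
        "X": (np.array([4, -1, 7]), np.array([0.2, 0.0, 0.9])),
        "Y": (np.array([1, 3, 2]), np.array([0.5, 0.1, 0.6])),
    }
    print(update_states_position(states, position))  # -> [ 0 40  0]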