Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions parcels/basegrid.py
Original file line number Diff line number Diff line change
Expand Up @@ -54,9 +54,9 @@ def search(self, z: float, y: float, x: float, ei=None) -> dict[str, tuple[int,
- Unstructured grid: {"Z": (zi, zeta), "FACE": (fi, bcoords)}

Where:
- index (int): The cell position of a particle along the given axis
- index (int): The cell position of the particles along the given axis
- barycentric_coordinates (float or np.ndarray): The coordinates defining
a particle's position within the grid cell. For structured grids, this
the particles' positions within the grid cell. For structured grids, this
is a single coordinate per axis; for unstructured grids, this can be
an array of coordinates for the face polygon.

Expand Down
48 changes: 24 additions & 24 deletions parcels/field.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ def _deal_with_errors(error, key, vector_type: VectorType):
elif isinstance(key[-1], KernelParticle):
key[-1].state = AllParcelsErrorCodes[type(error)]
else:
raise RuntimeError(f"{error}. Error could not be handled because particle was not part of the Field Sampling.")
raise RuntimeError(f"{error}. Error could not be handled because particles were not part of the Field Sampling.")

if vector_type and "3D" in vector_type:
return (0, 0, 0)
Expand Down Expand Up @@ -205,7 +205,7 @@ def _check_velocitysampling(self):
stacklevel=2,
)

def eval(self, time: datetime, z, y, x, particle=None, applyConversion=True):
def eval(self, time: datetime, z, y, x, particles=None, applyConversion=True):
"""Interpolate field values in space and time.

We interpolate linearly in time and apply implicit unit
Expand All @@ -219,11 +219,11 @@ def eval(self, time: datetime, z, y, x, particle=None, applyConversion=True):

tau, ti = _search_time_index(self, time)
position = self.grid.search(z, y, x, ei=_ei)
_update_particle_states_position(particle, position)
_update_particle_states_position(particles, position)

value = self._interp_method(self, ti, position, tau, time, z, y, x)

_update_particle_states_interp_value(particle, value)
_update_particle_states_interp_value(particles, value)

if applyConversion:
value = self.units.to_target(value, z, y, x)
Expand Down Expand Up @@ -287,7 +287,7 @@ def vector_interp_method(self, method: Callable):
_assert_same_function_signature(method, ref=ZeroInterpolator_Vector, context="Interpolation")
self._vector_interp_method = method

def eval(self, time: datetime, z, y, x, particle=None, applyConversion=True):
def eval(self, time: datetime, z, y, x, particles=None, applyConversion=True):
"""Interpolate field values in space and time.

We interpolate linearly in time and apply implicit unit
Expand All @@ -301,7 +301,7 @@ def eval(self, time: datetime, z, y, x, particle=None, applyConversion=True):

tau, ti = _search_time_index(self.U, time)
position = self.grid.search(z, y, x, ei=_ei)
_update_particle_states_position(particle, position)
_update_particle_states_position(particles, position)

if self._vector_interp_method is None:
u = self.U._interp_method(self.U, ti, position, tau, time, z, y, x)
Expand All @@ -319,7 +319,7 @@ def eval(self, time: datetime, z, y, x, particle=None, applyConversion=True):
(u, v, w) = self._vector_interp_method(self, ti, position, tau, time, z, y, x, applyConversion)

for vel in (u, v, w):
_update_particle_states_interp_value(particle, vel)
_update_particle_states_interp_value(particles, vel)

if applyConversion and ("3D" in self.vector_type):
w = self.W.units.to_target(w, z, y, x) if self.W else 0.0
Expand All @@ -339,34 +339,34 @@ def __getitem__(self, key):
return _deal_with_errors(error, key, vector_type=self.vector_type)


def _update_particle_states_position(particle, position):
def _update_particle_states_position(particles, position):
"""Update the particle states based on the position dictionary."""
if particle: # TODO also support uxgrid search
if particles: # TODO also support uxgrid search
for dim in ["X", "Y"]:
if dim in position:
particle.state = np.maximum(
np.where(position[dim][0] == -1, StatusCode.ErrorOutOfBounds, particle.state), particle.state
particles.state = np.maximum(
np.where(position[dim][0] == -1, StatusCode.ErrorOutOfBounds, particles.state), particles.state
)
particle.state = np.maximum(
np.where(position[dim][0] == GRID_SEARCH_ERROR, StatusCode.ErrorGridSearching, particle.state),
particle.state,
particles.state = np.maximum(
np.where(position[dim][0] == GRID_SEARCH_ERROR, StatusCode.ErrorGridSearching, particles.state),
particles.state,
)
if "Z" in position:
particle.state = np.maximum(
np.where(position["Z"][0] == RIGHT_OUT_OF_BOUNDS, StatusCode.ErrorOutOfBounds, particle.state),
particle.state,
particles.state = np.maximum(
np.where(position["Z"][0] == RIGHT_OUT_OF_BOUNDS, StatusCode.ErrorOutOfBounds, particles.state),
particles.state,
)
particle.state = np.maximum(
np.where(position["Z"][0] == LEFT_OUT_OF_BOUNDS, StatusCode.ErrorThroughSurface, particle.state),
particle.state,
particles.state = np.maximum(
np.where(position["Z"][0] == LEFT_OUT_OF_BOUNDS, StatusCode.ErrorThroughSurface, particles.state),
particles.state,
)


def _update_particle_states_interp_value(particle, value):
def _update_particle_states_interp_value(particles, value):
"""Update the particle states based on the interpolated value, but only if state is not an Error already."""
if particle:
particle.state = np.maximum(
np.where(np.isnan(value), StatusCode.ErrorInterpolation, particle.state), particle.state
if particles:
particles.state = np.maximum(
np.where(np.isnan(value), StatusCode.ErrorInterpolation, particles.state), particles.state
)


Expand Down
6 changes: 3 additions & 3 deletions parcels/particleset.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,12 +24,12 @@


class ParticleSet:
"""Class for storing particle and executing kernel over them.
"""Class for storing particles and executing kernels over them.

Please note that this currently only supports fixed size particle sets, meaning that the particle set only
holds the particles defined on construction. Individual particles can neither be added nor deleted individually,
and individual particles can only be deleted as a set procedurally (i.e. by 'particle.delete()'-call during
kernel execution).
and individual particles can only be deleted as a set procedurally (i.e. by changing their state to 'StatusCode.Delete'
during kernel execution).

Parameters
----------
Expand Down
6 changes: 3 additions & 3 deletions parcels/tools/statuscodes.py
Original file line number Diff line number Diff line change
Expand Up @@ -110,10 +110,10 @@ def _raise_time_extrapolation_error(time: float, field=None):


class KernelError(RuntimeError):
"""General particle kernel error with optional custom message."""
"""General kernel error for particles, with optional custom message."""

def __init__(self, particle, fieldset=None, msg=None):
message = f"{particle.state}\nParticle {particle}\nTime: {particle.time}\ntimestep dt: {particle.dt}\n"
def __init__(self, particles, fieldset=None, msg=None):
message = f"{particles.state}\nParticles {particles}\nTime: {particles.time}\ntimestep dt: {particles.dt}\n"
if msg:
message += msg
super().__init__(message)
Expand Down
2 changes: 1 addition & 1 deletion parcels/xgrid.py
Original file line number Diff line number Diff line change
Expand Up @@ -508,7 +508,7 @@ def _search_1d_array(
x: float,
) -> tuple[int, int]:
"""
Searches for the particle location in a 1D array and returns barycentric coordinate along dimension.
Searches for particle locations in a 1D array and returns barycentric coordinate along dimension.

Assumptions:
- array is strictly monotonically increasing.
Expand Down
Loading