Merged
4 changes: 2 additions & 2 deletions parcels/_index_search.py
@@ -79,7 +79,7 @@ def search_indices_vertical_s(
     eta = 1
     if time < grid.time[ti]:
         ti -= 1
-    if grid._z4d:
+    if grid._z4d:  # type: ignore[attr-defined]
         if ti == len(grid.time) - 1:
             depth_vector = (
                 (1 - xsi) * (1 - eta) * grid.depth[-1, :, yi, xi]
@@ -232,7 +232,7 @@ def _search_indices_curvilinear(field: Field, time, z, y, x, ti=-1, particle=None):
     else:
         xi = int(field.grid.xdim / 2) - 1
         yi = int(field.grid.ydim / 2) - 1
-    xsi = eta = -1
+    xsi = eta = -1.0
     grid = field.grid
     invA = np.array([[1, 0, 0, 0], [-1, 1, 0, 0], [-1, 0, 0, 1], [1, -1, 1, -1]])
     maxIterSearch = 1e6
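The # type: ignore[attr-defined] added above suppresses a single mypy error code on that one line. A minimal sketch (stand-in class, not the parcels Grid) of why mypy raises attr-defined for attributes that are only attached at runtime:

class Grid:
    """Stand-in: the real grid gains _z4d dynamically, not in the class body."""

grid = Grid()
grid._z4d = True  # type: ignore[attr-defined]  # set at runtime, invisible to mypy

if grid._z4d:  # type: ignore[attr-defined]  # silences only the attr-defined code
    print("grid depth varies in time")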
6 changes: 3 additions & 3 deletions parcels/_interpolation.py
@@ -146,7 +146,7 @@ def _linear_invdist_land_tracer_2d(ctx: InterpolationContext2D) -> float:
         return 0
     elif nb_land > 0:
         val = 0
-        w_sum = 0
+        w_sum = 0.0
         for j in range(2):
             for i in range(2):
                 distance = pow((eta - j), 2) + pow((xsi - i), 2)
@@ -196,8 +196,8 @@ def _linear_invdist_land_tracer_3d(ctx: InterpolationContext3D) -> float:
     if nb_land == 8:
         return 0
     elif nb_land > 0:
-        val = 0
-        w_sum = 0
+        val = 0.0
+        w_sum = 0.0
         for k in range(2):
             for j in range(2):
                 for i in range(2):
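The 0 -> 0.0 and -1 -> -1.0 changes matter for static typing rather than runtime behaviour: mypy infers a variable's type from its first assignment, so an int initialiser makes every later float update an error. A small self-contained sketch of the failure mode:

def accumulate(weights: list[float]) -> float:
    w_sum = 0.0  # with "w_sum = 0", mypy infers int and flags "w_sum += w" below
    for w in weights:
        w_sum += w
    return w_sum

print(accumulate([0.25, 0.75]))  # 1.0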
4 changes: 2 additions & 2 deletions parcels/grid.py
@@ -830,7 +830,7 @@
     attribute to the grid.
     """
     if not grid.cell_edge_sizes:
-        if grid._gtype in (GridType.RectilinearZGrid, GridType.RectilinearSGrid):
+        if grid._gtype in (GridType.RectilinearZGrid, GridType.RectilinearSGrid):  # type: ignore[attr-defined]
             grid.cell_edge_sizes["x"] = np.zeros((grid.ydim, grid.xdim), dtype=np.float32)
             grid.cell_edge_sizes["y"] = np.zeros((grid.ydim, grid.xdim), dtype=np.float32)
@@ -842,7 +842,7 @@
                     grid.cell_edge_sizes["y"][y, x] = y_conv.to_source(dy, grid.depth[0], lat, lon)
         else:
             raise ValueError(
-                f"_cell_edge_sizes() not implemented for {grid._gtype} grids. "
+                f"_cell_edge_sizes() not implemented for {grid._gtype} grids. "  # type: ignore[attr-defined]
                 "You can provide Field.grid.cell_edge_sizes yourself by in, e.g., "
                 "NEMO using the e1u fields etc from the mesh_mask.nc file."
             )
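The ValueError above points users of curvilinear grids at supplying the edge sizes themselves. A hedged sketch of what that might look like with NEMO mesh data (the e1u name comes from the error message; the file path, the e2v counterpart, and the pre-existing Field object named field are assumptions, not taken from this PR):

import numpy as np
import xarray as xr

mesh = xr.open_dataset("mesh_mask.nc")  # hypothetical mesh file
field.grid.cell_edge_sizes["x"] = mesh["e1u"].values.squeeze().astype(np.float32)
field.grid.cell_edge_sizes["y"] = mesh["e2v"].values.squeeze().astype(np.float32)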
3 changes: 2 additions & 1 deletion parcels/particledata.py
@@ -350,8 +350,9 @@ def flatten_dense_data_array(vname):
         cstruct = CParticles(*cdata)
         return cstruct

-    def _to_write_particles(self, pd, time):
+    def _to_write_particles(self, time):
         """Return the Particles that need to be written at time: if particle.time is between time-dt/2 and time+dt (/2)"""
+        pd = self._data
         return np.where(
             (
                 np.less_equal(time - np.abs(pd["dt"] / 2), pd["time"], where=np.isfinite(pd["time"]))
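The window test in _to_write_particles is vectorised over all particles. A standalone sketch of the lower-bound half visible in the diff (the upper-bound test is cut off by the excerpt); the arrays are illustrative:

import numpy as np

dt = np.array([1.0, 1.0, np.nan])      # per-particle timestep
ptime = np.array([9.6, 12.0, np.nan])  # per-particle time stamp
time = 10.0                            # output time being written

lower_ok = np.less_equal(time - np.abs(dt / 2), ptime, where=np.isfinite(ptime))
mask = lower_ok & np.isfinite(ptime)   # isfinite also masks the uninitialised slot
print(np.where(mask)[0])  # [0 1]; the full check's upper bound would drop index 1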
146 changes: 74 additions & 72 deletions parcels/particlefile.py
@@ -285,86 +285,88 @@
             return

         if indices is None:
-            indices_to_write = pset.particledata._to_write_particles(pset.particledata._data, time)
+            indices_to_write = pset.particledata._to_write_particles(time)
         else:
             indices_to_write = indices

-        if len(indices_to_write) > 0:
-            pids = pset.particledata.getvardata("id", indices_to_write)
-            to_add = sorted(set(pids) - set(self._pids_written.keys()))
-            for i, pid in enumerate(to_add):
-                self._pids_written[pid] = self._maxids + i
-            ids = np.array([self._pids_written[p] for p in pids], dtype=int)
-            self._maxids = len(self._pids_written)
-
-            once_ids = np.where(pset.particledata.getvardata("obs_written", indices_to_write) == 0)[0]
-            if len(once_ids) > 0:
-                ids_once = ids[once_ids]
-                indices_to_write_once = indices_to_write[once_ids]
-
-            if self.create_new_zarrfile:
-                if self.chunks is None:
-                    self._chunks = (len(pset), 1)
-                if pset._repeatpclass is not None and self.chunks[0] < 1e4:  # type: ignore[index]
-                    warnings.warn(
-                        f"ParticleFile chunks are set to {self.chunks}, but this may lead to "
-                        f"a significant slowdown in Parcels when many calls to repeatdt. "
-                        f"Consider setting a larger chunk size for your ParticleFile (e.g. chunks=(int(1e4), 1)).",
-                        FileWarning,
-                        stacklevel=2,
-                    )
-                if (self._maxids > len(ids)) or (self._maxids > self.chunks[0]):  # type: ignore[index]
-                    arrsize = (self._maxids, self.chunks[1])  # type: ignore[index]
-                else:
-                    arrsize = (len(ids), self.chunks[1])  # type: ignore[index]
-                ds = xr.Dataset(
-                    attrs=self.metadata,
-                    coords={"trajectory": ("trajectory", pids), "obs": ("obs", np.arange(arrsize[1], dtype=np.int32))},
-                )
-                attrs = self._create_variables_attribute_dict()
-                obs = np.zeros((self._maxids), dtype=np.int32)
-                for var in self.vars_to_write:
-                    varout = self._convert_varout_name(var)
-                    if varout not in ["trajectory"]:  # because 'trajectory' is written as coordinate
-                        if self._write_once(var):
-                            data = np.full(
-                                (arrsize[0],),
-                                self._fill_value_map[self.vars_to_write[var]],
-                                dtype=self.vars_to_write[var],
-                            )
-                            data[ids_once] = pset.particledata.getvardata(var, indices_to_write_once)
-                            dims = ["trajectory"]
-                        else:
-                            data = np.full(
-                                arrsize, self._fill_value_map[self.vars_to_write[var]], dtype=self.vars_to_write[var]
-                            )
-                            data[ids, 0] = pset.particledata.getvardata(var, indices_to_write)
-                            dims = ["trajectory", "obs"]
-                        ds[varout] = xr.DataArray(data=data, dims=dims, attrs=attrs[varout])
-                        ds[varout].encoding["chunks"] = self.chunks[0] if self._write_once(var) else self.chunks  # type: ignore[index]
-                ds.to_zarr(self.fname, mode="w")
-                self._create_new_zarrfile = False
-            else:
-                # Either use the store that was provided directly or create a DirectoryStore:
-                if issubclass(type(self.fname), zarr.storage.Store):
-                    store = self.fname
-                else:
-                    store = zarr.DirectoryStore(self.fname)
-                Z = zarr.group(store=store, overwrite=False)
-                obs = pset.particledata.getvardata("obs_written", indices_to_write)
-                for var in self.vars_to_write:
-                    varout = self._convert_varout_name(var)
-                    if self._maxids > Z[varout].shape[0]:
-                        self._extend_zarr_dims(Z[varout], store, dtype=self.vars_to_write[var], axis=0)
-                    if self._write_once(var):
-                        if len(once_ids) > 0:
-                            Z[varout].vindex[ids_once] = pset.particledata.getvardata(var, indices_to_write_once)
-                    else:
-                        if max(obs) >= Z[varout].shape[1]:  # type: ignore[type-var]
-                            self._extend_zarr_dims(Z[varout], store, dtype=self.vars_to_write[var], axis=1)
-                        Z[varout].vindex[ids, obs] = pset.particledata.getvardata(var, indices_to_write)
+        if len(indices_to_write) == 0:
+            return
+
+        pids = pset.particledata.getvardata("id", indices_to_write)
+        to_add = sorted(set(pids) - set(self._pids_written.keys()))
+        for i, pid in enumerate(to_add):
+            self._pids_written[pid] = self._maxids + i
+        ids = np.array([self._pids_written[p] for p in pids], dtype=int)
+        self._maxids = len(self._pids_written)
+
+        once_ids = np.where(pset.particledata.getvardata("obs_written", indices_to_write) == 0)[0]
+        if len(once_ids) > 0:
+            ids_once = ids[once_ids]
+            indices_to_write_once = indices_to_write[once_ids]
+
+        if self.create_new_zarrfile:
+            if self.chunks is None:
+                self._chunks = (len(pset), 1)
+            if pset._repeatpclass is not None and self.chunks[0] < 1e4:  # type: ignore[index]
+                warnings.warn(
+                    f"ParticleFile chunks are set to {self.chunks}, but this may lead to "
+                    f"a significant slowdown in Parcels when many calls to repeatdt. "
+                    f"Consider setting a larger chunk size for your ParticleFile (e.g. chunks=(int(1e4), 1)).",
+                    FileWarning,
+                    stacklevel=2,
+                )
+            if (self._maxids > len(ids)) or (self._maxids > self.chunks[0]):  # type: ignore[index]
+                arrsize = (self._maxids, self.chunks[1])  # type: ignore[index]
+            else:
+                arrsize = (len(ids), self.chunks[1])  # type: ignore[index]
+            ds = xr.Dataset(
+                attrs=self.metadata,
+                coords={"trajectory": ("trajectory", pids), "obs": ("obs", np.arange(arrsize[1], dtype=np.int32))},
+            )
+            attrs = self._create_variables_attribute_dict()
+            obs = np.zeros((self._maxids), dtype=np.int32)
+            for var in self.vars_to_write:
+                varout = self._convert_varout_name(var)
+                if varout not in ["trajectory"]:  # because 'trajectory' is written as coordinate
+                    if self._write_once(var):
+                        data = np.full(
+                            (arrsize[0],),
+                            self._fill_value_map[self.vars_to_write[var]],
+                            dtype=self.vars_to_write[var],
+                        )
+                        data[ids_once] = pset.particledata.getvardata(var, indices_to_write_once)
+                        dims = ["trajectory"]
+                    else:
+                        data = np.full(
+                            arrsize, self._fill_value_map[self.vars_to_write[var]], dtype=self.vars_to_write[var]
+                        )
+                        data[ids, 0] = pset.particledata.getvardata(var, indices_to_write)
+                        dims = ["trajectory", "obs"]
+                    ds[varout] = xr.DataArray(data=data, dims=dims, attrs=attrs[varout])
+                    ds[varout].encoding["chunks"] = self.chunks[0] if self._write_once(var) else self.chunks  # type: ignore[index]
+            ds.to_zarr(self.fname, mode="w")
+            self._create_new_zarrfile = False
+        else:
+            # Either use the store that was provided directly or create a DirectoryStore:
+            if isinstance(self.fname, zarr.storage.Store):
Contributor Author (inline comment): Previously this was issubclass(type(self.fname), zarr.storage.Store); the isinstance form is equivalent.
+                store = self.fname
+            else:
+                store = zarr.DirectoryStore(self.fname)
+            Z = zarr.group(store=store, overwrite=False)
+            obs = pset.particledata.getvardata("obs_written", indices_to_write)
+            for var in self.vars_to_write:
+                varout = self._convert_varout_name(var)
+                if self._maxids > Z[varout].shape[0]:
+                    self._extend_zarr_dims(Z[varout], store, dtype=self.vars_to_write[var], axis=0)
+                if self._write_once(var):
+                    if len(once_ids) > 0:
+                        Z[varout].vindex[ids_once] = pset.particledata.getvardata(var, indices_to_write_once)
+                else:
+                    if max(obs) >= Z[varout].shape[1]:  # type: ignore[type-var]
+                        self._extend_zarr_dims(Z[varout], store, dtype=self.vars_to_write[var], axis=1)
+                    Z[varout].vindex[ids, obs] = pset.particledata.getvardata(var, indices_to_write)

-            pset.particledata.setvardata("obs_written", indices_to_write, obs + 1)
+        pset.particledata.setvardata("obs_written", indices_to_write, obs + 1)
Contributor Author (comment on lines +292 to +369): This is just a reversal of the clause and a de-indentation.

     def write_latest_locations(self, pset, time):
         """Write the current (latest) particle locations to zarr file.
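On the chunk-size warning in the particlefile.py diff above, the suggested mitigation lives in user code. A hedged usage sketch (assumes the v3-style ParticleFile API and an existing ParticleSet named pset; names are illustrative):

from datetime import timedelta

output_file = pset.ParticleFile(
    name="trajectories.zarr",
    outputdt=timedelta(hours=1),
    chunks=(int(1e4), 1),  # larger trajectory-dimension chunks, as the warning advises
)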