diff --git a/parcels/_index_search.py b/parcels/_index_search.py
index a6f733fea1..ada8ff98fd 100644
--- a/parcels/_index_search.py
+++ b/parcels/_index_search.py
@@ -79,7 +79,7 @@ def search_indices_vertical_s(
         eta = 1
     if time < grid.time[ti]:
         ti -= 1
-    if grid._z4d:
+    if grid._z4d:  # type: ignore[attr-defined]
         if ti == len(grid.time) - 1:
             depth_vector = (
                 (1 - xsi) * (1 - eta) * grid.depth[-1, :, yi, xi]
@@ -232,7 +232,7 @@ def _search_indices_curvilinear(field: Field, time, z, y, x, ti=-1, particle=Non
     else:
         xi = int(field.grid.xdim / 2) - 1
         yi = int(field.grid.ydim / 2) - 1
-    xsi = eta = -1
+    xsi = eta = -1.0
     grid = field.grid
     invA = np.array([[1, 0, 0, 0], [-1, 1, 0, 0], [-1, 0, 0, 1], [1, -1, 1, -1]])
     maxIterSearch = 1e6
diff --git a/parcels/_interpolation.py b/parcels/_interpolation.py
index a6ebf65950..a533fd9d56 100644
--- a/parcels/_interpolation.py
+++ b/parcels/_interpolation.py
@@ -146,7 +146,7 @@ def _linear_invdist_land_tracer_2d(ctx: InterpolationContext2D) -> float:
         return 0
     elif nb_land > 0:
         val = 0
-        w_sum = 0
+        w_sum = 0.0
         for j in range(2):
             for i in range(2):
                 distance = pow((eta - j), 2) + pow((xsi - i), 2)
@@ -196,8 +196,8 @@ def _linear_invdist_land_tracer_3d(ctx: InterpolationContext3D) -> float:
     if nb_land == 8:
         return 0
     elif nb_land > 0:
-        val = 0
-        w_sum = 0
+        val = 0.0
+        w_sum = 0.0
         for k in range(2):
             for j in range(2):
                 for i in range(2):
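The integer-to-float literal changes above (`xsi = eta = -1` → `-1.0`, `w_sum = 0` → `0.0`), like the `# type: ignore` comments, presumably serve static type checking: mypy pins a variable's type at its first assignment, so an accumulator initialised with an integer literal is inferred as `int` and the later float updates are rejected. A minimal sketch of that failure mode — hypothetical code, not from the parcels codebase:

```python
def inverse_distance_mean(samples: list[float], dists: list[float]) -> float:
    """Toy inverse-distance weighting; names and signature are illustrative only."""
    val = 0.0    # with `val = 0`, mypy infers `int` and rejects the float `+=` below
    w_sum = 0.0  # same reasoning as the `w_sum = 0.0` changes above
    for s, d in zip(samples, dists):
        w = 1.0 / d**2
        val += w * s
        w_sum += w
    return val / w_sum
```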
diff --git a/parcels/grid.py b/parcels/grid.py
index 4f211382a9..244afe1494 100644
--- a/parcels/grid.py
+++ b/parcels/grid.py
@@ -830,7 +830,7 @@ def _calc_cell_edge_sizes(grid: RectilinearGrid) -> None:
     attribute to the grid.
     """
     if not grid.cell_edge_sizes:
-        if grid._gtype in (GridType.RectilinearZGrid, GridType.RectilinearSGrid):
+        if grid._gtype in (GridType.RectilinearZGrid, GridType.RectilinearSGrid):  # type: ignore[attr-defined]
             grid.cell_edge_sizes["x"] = np.zeros((grid.ydim, grid.xdim), dtype=np.float32)
             grid.cell_edge_sizes["y"] = np.zeros((grid.ydim, grid.xdim), dtype=np.float32)
 
@@ -842,7 +842,7 @@ def _calc_cell_edge_sizes(grid: RectilinearGrid) -> None:
                     grid.cell_edge_sizes["y"][y, x] = y_conv.to_source(dy, grid.depth[0], lat, lon)
         else:
             raise ValueError(
-                f"_cell_edge_sizes() not implemented for {grid._gtype} grids. "
+                f"_cell_edge_sizes() not implemented for {grid._gtype} grids. "  # type: ignore[attr-defined]
                 "You can provide Field.grid.cell_edge_sizes yourself by in, e.g., "
                 "NEMO using the e1u fields etc from the mesh_mask.nc file."
             )
diff --git a/parcels/particledata.py b/parcels/particledata.py
index 516083e830..9f2eb5ccdc 100644
--- a/parcels/particledata.py
+++ b/parcels/particledata.py
@@ -350,8 +350,9 @@ def flatten_dense_data_array(vname):
         cstruct = CParticles(*cdata)
         return cstruct
 
-    def _to_write_particles(self, pd, time):
+    def _to_write_particles(self, time):
         """Return the Particles that need to be written at time: if particle.time is between time-dt/2 and time+dt (/2)"""
+        pd = self._data
         return np.where(
             (
                 np.less_equal(time - np.abs(pd["dt"] / 2), pd["time"], where=np.isfinite(pd["time"]))
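The `_to_write_particles` change drops the `pd` parameter in favour of reading `self._data` directly, so callers (see `particlefile.py` below) no longer reach into the private data array themselves. For reference, a standalone sketch of the selection rule the method implements; the arrays and values here are invented for illustration:

```python
import numpy as np

t = 10.0                                     # output time
ptime = np.array([9.6, 10.2, 11.0, np.nan])  # per-particle time
pdt = np.array([1.0, 1.0, 1.0, 1.0])         # per-particle timestep

finite = np.isfinite(ptime)
to_write = np.where(
    np.less_equal(t - np.abs(pdt / 2), ptime, where=finite)
    & np.greater_equal(t + np.abs(pdt / 2), ptime, where=finite)
    & finite  # masks entries skipped by `where=`, mirroring the np.isfinite guard
)[0]
print(to_write)  # -> [0 1]: particles whose time lies within half a timestep of t
```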
chunks=(int(1e4), 1)).", + FileWarning, + stacklevel=2, ) - attrs = self._create_variables_attribute_dict() - obs = np.zeros((self._maxids), dtype=np.int32) - for var in self.vars_to_write: - varout = self._convert_varout_name(var) - if varout not in ["trajectory"]: # because 'trajectory' is written as coordinate - if self._write_once(var): - data = np.full( - (arrsize[0],), - self._fill_value_map[self.vars_to_write[var]], - dtype=self.vars_to_write[var], - ) - data[ids_once] = pset.particledata.getvardata(var, indices_to_write_once) - dims = ["trajectory"] - else: - data = np.full( - arrsize, self._fill_value_map[self.vars_to_write[var]], dtype=self.vars_to_write[var] - ) - data[ids, 0] = pset.particledata.getvardata(var, indices_to_write) - dims = ["trajectory", "obs"] - ds[varout] = xr.DataArray(data=data, dims=dims, attrs=attrs[varout]) - ds[varout].encoding["chunks"] = self.chunks[0] if self._write_once(var) else self.chunks # type: ignore[index] - ds.to_zarr(self.fname, mode="w") - self._create_new_zarrfile = False + if (self._maxids > len(ids)) or (self._maxids > self.chunks[0]): # type: ignore[index] + arrsize = (self._maxids, self.chunks[1]) # type: ignore[index] else: - # Either use the store that was provided directly or create a DirectoryStore: - if issubclass(type(self.fname), zarr.storage.Store): - store = self.fname - else: - store = zarr.DirectoryStore(self.fname) - Z = zarr.group(store=store, overwrite=False) - obs = pset.particledata.getvardata("obs_written", indices_to_write) - for var in self.vars_to_write: - varout = self._convert_varout_name(var) - if self._maxids > Z[varout].shape[0]: - self._extend_zarr_dims(Z[varout], store, dtype=self.vars_to_write[var], axis=0) + arrsize = (len(ids), self.chunks[1]) # type: ignore[index] + ds = xr.Dataset( + attrs=self.metadata, + coords={"trajectory": ("trajectory", pids), "obs": ("obs", np.arange(arrsize[1], dtype=np.int32))}, + ) + attrs = self._create_variables_attribute_dict() + obs = np.zeros((self._maxids), dtype=np.int32) + for var in self.vars_to_write: + varout = self._convert_varout_name(var) + if varout not in ["trajectory"]: # because 'trajectory' is written as coordinate if self._write_once(var): - if len(once_ids) > 0: - Z[varout].vindex[ids_once] = pset.particledata.getvardata(var, indices_to_write_once) + data = np.full( + (arrsize[0],), + self._fill_value_map[self.vars_to_write[var]], + dtype=self.vars_to_write[var], + ) + data[ids_once] = pset.particledata.getvardata(var, indices_to_write_once) + dims = ["trajectory"] else: - if max(obs) >= Z[varout].shape[1]: # type: ignore[type-var] - self._extend_zarr_dims(Z[varout], store, dtype=self.vars_to_write[var], axis=1) - Z[varout].vindex[ids, obs] = pset.particledata.getvardata(var, indices_to_write) + data = np.full( + arrsize, self._fill_value_map[self.vars_to_write[var]], dtype=self.vars_to_write[var] + ) + data[ids, 0] = pset.particledata.getvardata(var, indices_to_write) + dims = ["trajectory", "obs"] + ds[varout] = xr.DataArray(data=data, dims=dims, attrs=attrs[varout]) + ds[varout].encoding["chunks"] = self.chunks[0] if self._write_once(var) else self.chunks # type: ignore[index] + ds.to_zarr(self.fname, mode="w") + self._create_new_zarrfile = False + else: + # Either use the store that was provided directly or create a DirectoryStore: + if isinstance(self.fname, zarr.storage.Store): + store = self.fname + else: + store = zarr.DirectoryStore(self.fname) + Z = zarr.group(store=store, overwrite=False) + obs = pset.particledata.getvardata("obs_written", 
indices_to_write) + for var in self.vars_to_write: + varout = self._convert_varout_name(var) + if self._maxids > Z[varout].shape[0]: + self._extend_zarr_dims(Z[varout], store, dtype=self.vars_to_write[var], axis=0) + if self._write_once(var): + if len(once_ids) > 0: + Z[varout].vindex[ids_once] = pset.particledata.getvardata(var, indices_to_write_once) + else: + if max(obs) >= Z[varout].shape[1]: # type: ignore[type-var] + self._extend_zarr_dims(Z[varout], store, dtype=self.vars_to_write[var], axis=1) + Z[varout].vindex[ids, obs] = pset.particledata.getvardata(var, indices_to_write) - pset.particledata.setvardata("obs_written", indices_to_write, obs + 1) + pset.particledata.setvardata("obs_written", indices_to_write, obs + 1) def write_latest_locations(self, pset, time): """Write the current (latest) particle locations to zarr file.
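Besides adapting to the new `_to_write_particles` signature, the `write()` hunk makes two behaviour-preserving edits: the `if len(indices_to_write) > 0:` wrapper becomes an early return, dedenting the whole body one level, and `issubclass(type(self.fname), zarr.storage.Store)` becomes the idiomatic `isinstance(self.fname, zarr.storage.Store)`. A toy illustration of both patterns, not parcels code:

```python
def describe(store: object, names: list[str]) -> str:
    """Hypothetical stand-in: `store` and `names` play the roles of fname and vars_to_write."""
    if len(names) == 0:
        return "nothing to write"  # guard clause keeps the main body flat

    # isinstance(x, C) is the idiomatic spelling of issubclass(type(x), C)
    # for instance checks and behaves identically here.
    target = "mapping store" if isinstance(store, dict) else "directory store"
    return f"writing {len(names)} variables to {target}"
```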