2 changes: 1 addition & 1 deletion .github/ci/min_deps_check.py
@@ -193,7 +193,7 @@ def main() -> None:
print("\nErrors:")
print("-------")
for i, e in enumerate(errors):
print(f"{i+1}. {e}")
print(f"{i + 1}. {e}")
sys.exit(1)


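Note on the change above: the newer ruff formatter (bumped in .pre-commit-config.yaml below) also formats the expressions inside f-string replacement fields, which is what turns {i+1} into {i + 1} here and produces the similar spacing changes in the notebooks further down. The printed output is unchanged; only the source formatting differs. A small sketch with hypothetical values, not taken from min_deps_check.py:

# Hypothetical error list and index, for illustration only.
i, e = 0, "minimum pinned version is too old"
print(f"{i+1}. {e}")    # old source formatting
print(f"{i + 1}. {e}")  # new source formatting; prints exactly the same line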
6 changes: 3 additions & 3 deletions .pre-commit-config.yaml
@@ -10,7 +10,7 @@ repos:
types: [text]
files: \.(json|ipynb)$
- repo: https://github.com/astral-sh/ruff-pre-commit
-rev: v0.8.6
+rev: v0.12.5
hooks:
- id: ruff
name: ruff lint (.py)
@@ -23,13 +23,13 @@ repos:
- id: ruff-format
types_or: [python, jupyter]
- repo: https://github.com/rbubley/mirrors-prettier # Update mirror as official mirror is deprecated
-rev: v3.4.2
+rev: v3.6.2
hooks:
- id: prettier

# Ruff doesn't have full coverage of pydoclint https://github.com/astral-sh/ruff/issues/12434
- repo: https://github.com/PyCQA/flake8
-rev: 7.1.1
+rev: 7.3.0
hooks:
- id: flake8
name: pydoclint
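The ruff bump from v0.8.6 to v0.12.5 (together with the prettier and flake8 bumps above) is what drives the reformatting in the rest of this diff; after updating the revs, the hooks are typically re-run across the whole repository so all formatter changes land in one commit. The most visible style change is that a long assert message is now wrapped in parentheses instead of the condition. A schematic before/after sketch with a hypothetical assert, not code from this repository:

# Layout produced before the bump (schematic): the condition is parenthesized
# and wrapped, and the message trails the closing parenthesis.
assert (
    depth_is_monotonic
), "depth levels must increase monotonically from the surface to the sea floor"

# Layout produced after the bump (schematic): the condition stays on one line
# and the long message is parenthesized instead.
assert depth_is_monotonic, (
    "depth levels must increase monotonically from the surface to the sea floor"
)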
12 changes: 6 additions & 6 deletions docs/examples/example_dask_chunk_OCMs.py
@@ -377,13 +377,13 @@ def test_swash(mode, chunk_mode):
if chunk_mode not in [
"failsafe",
]:
-assert len(fieldset.U.grid._load_chunk) == len(
-    fieldset.V.grid._load_chunk
-), f"U {fieldset.U.grid.chunk_info} vs V {fieldset.V.grid.chunk_info}"
+assert len(fieldset.U.grid._load_chunk) == len(fieldset.V.grid._load_chunk), (
+    f"U {fieldset.U.grid.chunk_info} vs V {fieldset.V.grid.chunk_info}"
+)
if chunk_mode not in ["failsafe", "auto"]:
-assert len(fieldset.U.grid._load_chunk) == len(
-    fieldset.W.grid._load_chunk
-), f"U {fieldset.U.grid.chunk_info} vs W {fieldset.W.grid.chunk_info}"
+assert len(fieldset.U.grid._load_chunk) == len(fieldset.W.grid._load_chunk), (
+    f"U {fieldset.U.grid.chunk_info} vs W {fieldset.W.grid.chunk_info}"
+)
if chunk_mode is False:
assert len(fieldset.U.grid._load_chunk) == 1
else:
6 changes: 2 additions & 4 deletions docs/examples/tutorial_NestedFields.ipynb
@@ -235,15 +235,13 @@
"pset = parcels.ParticleSet(fieldset, pclass=SampleParticle, lon=[1000], lat=[500])\n",
"pset.execute(SampleNestedFieldIndex, runtime=1)\n",
"print(\n",
" f\"Particle ({pset[0].lon:g}, {pset[0].lat:g}) \"\n",
" f\"interpolates Field #{int(pset[0].f)}\"\n",
" f\"Particle ({pset[0].lon:g}, {pset[0].lat:g}) interpolates Field #{int(pset[0].f)}\"\n",
")\n",
"\n",
"pset[0].lon = 10000\n",
"pset.execute(SampleNestedFieldIndex, runtime=1)\n",
"print(\n",
" f\"Particle ({pset[0].lon:g}, {pset[0].lat:g}) \"\n",
" f\"interpolates Field #{int(pset[0].f)}\"\n",
" f\"Particle ({pset[0].lon:g}, {pset[0].lat:g}) interpolates Field #{int(pset[0].f)}\"\n",
")"
]
}
2 changes: 1 addition & 1 deletion docs/examples/tutorial_nemo_3D.ipynb
@@ -155,7 +155,7 @@
"print(\n",
" f\"Level[{int(depth_level)}] depth is: \"\n",
" f\"[{fieldset.W.grid.depth[depth_level]:g} \"\n",
" f\"{fieldset.W.grid.depth[depth_level+1]:g}]\"\n",
" f\"{fieldset.W.grid.depth[depth_level + 1]:g}]\"\n",
")\n",
"\n",
"plt.pcolormesh(\n",
4 changes: 2 additions & 2 deletions docs/examples/tutorial_peninsula_AvsCgrid.ipynb
@@ -272,7 +272,7 @@
" # Set the same limits for all subplots\n",
" ax.set_xlim([fieldset.U.lon.min(), fieldset.U.lon.max()])\n",
" ax.set_ylim([0, 23e3])\n",
" m2km = lambda x, _: f\"{x/1000:.1f}\"\n",
" m2km = lambda x, _: f\"{x / 1000:.1f}\"\n",
" ax.xaxis.set_major_formatter(m2km)\n",
" ax.yaxis.set_major_formatter(m2km)\n",
" ax.set_xlabel(\"x [km]\")\n",
@@ -347,7 +347,7 @@
"ax.set_ylim([0, 23e3])\n",
"ax.set_ylabel(\"y [km]\")\n",
"ax.set_xlabel(\"x [km]\")\n",
"m2km = lambda x, _: f\"{x/1000:.1f}\"\n",
"m2km = lambda x, _: f\"{x / 1000:.1f}\"\n",
"ax.xaxis.set_major_formatter(m2km)\n",
"ax.yaxis.set_major_formatter(m2km)\n",
"\n",
4 changes: 2 additions & 2 deletions parcels/compilation/codegenerator.py
@@ -832,7 +832,7 @@ def visit_FieldEvalNode(self, node):
# Get Cs_w values directly from fieldset (since they are 1D in vertical only)
Cs_w = [float(self.fieldset.Cs_w.data[0][zi][0][0]) for zi in range(self.fieldset.Cs_w.data.shape[1])]
statements_croco = [
c.Statement(f"float cs_w[] = {*Cs_w, }".replace("(", "{").replace(")", "}")),
c.Statement(f"float cs_w[] = {(*Cs_w,)}".replace("(", "{").replace(")", "}")),
c.Statement(
f"{node.var} = croco_from_z_to_sigma(time, {args[1]}, {args[2]}, {args[3]}, U, H, Zeta, &particles->ti[pnum*ngrid], &particles->zi[pnum*ngrid], &particles->yi[pnum*ngrid], &particles->xi[pnum*ngrid], hc, &cs_w)"
),
@@ -861,7 +861,7 @@ def visit_VectorFieldEvalNode(self, node):
# Get Cs_w values directly from fieldset (since they are 1D in vertical only)
Cs_w = [float(self.fieldset.Cs_w.data[0][zi][0][0]) for zi in range(self.fieldset.Cs_w.data.shape[1])]
statements_croco = [
c.Statement(f"float cs_w[] = {*Cs_w, }".replace("(", "{").replace(")", "}")),
c.Statement(f"float cs_w[] = {(*Cs_w,)}".replace("(", "{").replace(")", "}")),
c.Statement(
f"{node.var4} = croco_from_z_to_sigma(time, {args[1]}, {args[2]}, {args[3]}, U, H, Zeta, &particles->ti[pnum*ngrid], &particles->zi[pnum*ngrid], &particles->yi[pnum*ngrid], &particles->xi[pnum*ngrid], hc, &cs_w)"
),
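For context on the two nearly identical changes above: both the old and the new spelling pack the Cs_w list into a Python tuple inside the f-string, and the following replace() calls turn the tuple's parentheses into braces so the result can be emitted as a C array initializer. A standalone sketch with made-up Cs_w values (plain Python, not a real fieldset):

# Made-up s-coordinate stretching values, for illustration only.
Cs_w = [-1.0, -0.5, 0.0]
# (*Cs_w,) builds a tuple from the list; its repr "(-1.0, -0.5, 0.0)" is then
# rewritten with braces to form a C initializer list.
stmt = f"float cs_w[] = {(*Cs_w,)}".replace("(", "{").replace(")", "}")
print(stmt)  # float cs_w[] = {-1.0, -0.5, 0.0}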
54 changes: 27 additions & 27 deletions parcels/field.py
@@ -334,9 +334,9 @@ def __init__(
self.grid.depth_field = kwargs.pop("depth_field", None)

if self.grid.depth_field == "not_yet_set":
-assert (
-    self.grid._z4d
-), "Providing the depth dimensions from another field data is only available for 4d S grids"
+assert self.grid._z4d, (
+    "Providing the depth dimensions from another field data is only available for 4d S grids"
+)

# data_full_zdim is the vertical dimension of the complete field data, ignoring the indices.
# (data_full_zdim = grid.zdim if no indices are used, for A- and C-grids and for some B-grids). It is used for the B-grid,
@@ -572,20 +572,20 @@ def from_netcdf(
# Ensure the timestamps array is compatible with the user-provided datafiles.
if timestamps is not None:
if isinstance(filenames, list):
-assert len(filenames) == len(
-    timestamps
-), "Outer dimension of timestamps should correspond to number of files."
+assert len(filenames) == len(timestamps), (
+    "Outer dimension of timestamps should correspond to number of files."
+)
elif isinstance(filenames, dict):
for k in filenames.keys():
if k not in ["lat", "lon", "depth", "time"]:
if isinstance(filenames[k], list):
-assert len(filenames[k]) == len(
-    timestamps
-), "Outer dimension of timestamps should correspond to number of files."
+assert len(filenames[k]) == len(timestamps), (
+    "Outer dimension of timestamps should correspond to number of files."
+)
else:
-assert (
-    len(timestamps) == 1
-), "Outer dimension of timestamps should correspond to number of files."
+assert len(timestamps) == 1, (
+    "Outer dimension of timestamps should correspond to number of files."
+)
for t in timestamps:
assert isinstance(t, (list, np.ndarray)), "timestamps should be a list for each file"

@@ -597,13 +597,13 @@ def from_netcdf(
if isinstance(variable, str): # for backward compatibility with Parcels < 2.0.0
variable = (variable, variable)
elif isinstance(variable, dict):
-assert (
-    len(variable) == 1
-), "Field.from_netcdf() supports only one variable at a time. Use FieldSet.from_netcdf() for multiple variables."
+assert len(variable) == 1, (
+    "Field.from_netcdf() supports only one variable at a time. Use FieldSet.from_netcdf() for multiple variables."
+)
variable = tuple(variable.items())[0]
-assert (
-    len(variable) == 2
-), "The variable tuple must have length 2. Use FieldSet.from_netcdf() for multiple variables"
+assert len(variable) == 2, (
+    "The variable tuple must have length 2. Use FieldSet.from_netcdf() for multiple variables"
+)

data_filenames = cls._get_dim_filenames(filenames, "data")
lonlat_filename = cls._get_dim_filenames(filenames, "lon")
@@ -2136,21 +2136,21 @@ def __init__(self, name: str, F, V=None, W=None):
if isinstance(F[0], VectorField):
vector_type = F[0].vector_type
for Fi in F:
-assert isinstance(Fi, Field) or (
-    isinstance(Fi, VectorField) and Fi.vector_type == vector_type
-), "Components of a NestedField must be Field or VectorField"
+assert isinstance(Fi, Field) or (isinstance(Fi, VectorField) and Fi.vector_type == vector_type), (
+    "Components of a NestedField must be Field or VectorField"
+)
self.append(Fi)
elif W is None:
for i, Fi, Vi in zip(range(len(F)), F, V, strict=True):
-assert isinstance(Fi, Field) and isinstance(
-    Vi, Field
-), "F, and V components of a NestedField must be Field"
+assert isinstance(Fi, Field) and isinstance(Vi, Field), (
+    "F, and V components of a NestedField must be Field"
+)
self.append(VectorField(f"{name}_{i}", Fi, Vi))
else:
for i, Fi, Vi, Wi in zip(range(len(F)), F, V, W, strict=True):
-assert (
-    isinstance(Fi, Field) and isinstance(Vi, Field) and isinstance(Wi, Field)
-), "F, V and W components of a NestedField must be Field"
+assert isinstance(Fi, Field) and isinstance(Vi, Field) and isinstance(Wi, Field), (
+    "F, V and W components of a NestedField must be Field"
+)
self.append(VectorField(f"{name}_{i}", Fi, Vi, Wi))
self.name = name

6 changes: 3 additions & 3 deletions parcels/fieldset.py
@@ -313,9 +313,9 @@ def check_velocityfields(U, V, W):
g._check_zonal_periodic()
if len(g.time) == 1:
continue
-assert isinstance(
-    g.time_origin.time_origin, type(self.time_origin.time_origin)
-), "time origins of different grids must be have the same type"
+assert isinstance(g.time_origin.time_origin, type(self.time_origin.time_origin)), (
+    "time origins of different grids must be have the same type"
+)
g.time = g.time + self.time_origin.reltime(g.time_origin)
if g.defer_load:
g.time_full = g.time_full + self.time_origin.reltime(g.time_origin)
66 changes: 33 additions & 33 deletions parcels/grid.py
@@ -64,9 +64,9 @@
if not lat.dtype == np.float32:
lat = lat.astype(np.float32)
if not time.dtype == np.float64:
-assert isinstance(
-    time[0], (np.integer, np.floating, float, int)
-), "Time vector must be an array of int or floats"
+assert isinstance(time[0], (np.integer, np.floating, float, int)), (
+    "Time vector must be an array of int or floats"
+)
time = time.astype(np.float64)

self._lon = lon
@@ -627,22 +627,22 @@
self._z4d = 1 if len(self.depth.shape) == 4 else 0
if self._z4d:
# self.depth.shape[0] is 0 for S grids loaded from netcdf file
-assert (
-    self.tdim == self.depth.shape[0] or self.depth.shape[0] == 0
-), "depth dimension has the wrong format. It should be [tdim, zdim, ydim, xdim]"
-assert (
-    self.xdim == self.depth.shape[-1] or self.depth.shape[-1] == 0
-), "depth dimension has the wrong format. It should be [tdim, zdim, ydim, xdim]"
-assert (
-    self.ydim == self.depth.shape[-2] or self.depth.shape[-2] == 0
-), "depth dimension has the wrong format. It should be [tdim, zdim, ydim, xdim]"
+assert self.tdim == self.depth.shape[0] or self.depth.shape[0] == 0, (
+    "depth dimension has the wrong format. It should be [tdim, zdim, ydim, xdim]"
+)
+assert self.xdim == self.depth.shape[-1] or self.depth.shape[-1] == 0, (
+    "depth dimension has the wrong format. It should be [tdim, zdim, ydim, xdim]"
+)
+assert self.ydim == self.depth.shape[-2] or self.depth.shape[-2] == 0, (
+    "depth dimension has the wrong format. It should be [tdim, zdim, ydim, xdim]"
+)
else:
-assert (
-    self.xdim == self.depth.shape[-1]
-), "depth dimension has the wrong format. It should be [zdim, ydim, xdim]"
-assert (
-    self.ydim == self.depth.shape[-2]
-), "depth dimension has the wrong format. It should be [zdim, ydim, xdim]"
+assert self.xdim == self.depth.shape[-1], (
+    "depth dimension has the wrong format. It should be [zdim, ydim, xdim]"
+)
+assert self.ydim == self.depth.shape[-2], (
+    "depth dimension has the wrong format. It should be [zdim, ydim, xdim]"
+)
if not self.depth.dtype == np.float32:
self._depth = self.depth.astype(np.float32)
if self._lat_flipped:
@@ -799,22 +799,22 @@
self._z4d = 1 if len(self.depth.shape) == 4 else 0
if self._z4d:
# self.depth.shape[0] is 0 for S grids loaded from netcdf file
-assert (
-    self.tdim == self.depth.shape[0] or self.depth.shape[0] == 0
-), "depth dimension has the wrong format. It should be [tdim, zdim, ydim, xdim]"
-assert (
-    self.xdim == self.depth.shape[-1] or self.depth.shape[-1] == 0
-), "depth dimension has the wrong format. It should be [tdim, zdim, ydim, xdim]"
-assert (
-    self.ydim == self.depth.shape[-2] or self.depth.shape[-2] == 0
-), "depth dimension has the wrong format. It should be [tdim, zdim, ydim, xdim]"
+assert self.tdim == self.depth.shape[0] or self.depth.shape[0] == 0, (
+    "depth dimension has the wrong format. It should be [tdim, zdim, ydim, xdim]"
+)
+assert self.xdim == self.depth.shape[-1] or self.depth.shape[-1] == 0, (
+    "depth dimension has the wrong format. It should be [tdim, zdim, ydim, xdim]"
+)
+assert self.ydim == self.depth.shape[-2] or self.depth.shape[-2] == 0, (
+    "depth dimension has the wrong format. It should be [tdim, zdim, ydim, xdim]"
+)
else:
-assert (
-    self.xdim == self.depth.shape[-1]
-), "depth dimension has the wrong format. It should be [zdim, ydim, xdim]"
-assert (
-    self.ydim == self.depth.shape[-2]
-), "depth dimension has the wrong format. It should be [zdim, ydim, xdim]"
+assert self.xdim == self.depth.shape[-1], (
+    "depth dimension has the wrong format. It should be [zdim, ydim, xdim]"
+)
+assert self.ydim == self.depth.shape[-2], (
+    "depth dimension has the wrong format. It should be [zdim, ydim, xdim]"
+)
if not self.depth.dtype == np.float32:
self._depth = self.depth.astype(np.float32)

6 changes: 3 additions & 3 deletions parcels/interaction/interactionkernel.py
@@ -83,9 +83,9 @@ def __init__(

numkernelargs = self.check_kernel_signature_on_version()

-assert numkernelargs[0] == 5 and numkernelargs.count(numkernelargs[0]) == len(
-    numkernelargs
-), "Interactionkernels take exactly 5 arguments: particle, fieldset, time, neighbors, mutator"
+assert numkernelargs[0] == 5 and numkernelargs.count(numkernelargs[0]) == len(numkernelargs), (
+    "Interactionkernels take exactly 5 arguments: particle, fieldset, time, neighbors, mutator"
+)

# At this time, JIT mode is not supported for InteractionKernels,
# so there is no need for any further "processing" of pyfunc's.
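The assert above documents the expected interaction-kernel signature. A minimal sketch of a kernel with that signature, assuming hypothetical names (CountNeighbors and n_neighbors are not part of this diff or of the parcels API):

# Hypothetical kernel; only the five-argument signature is taken from the assert above.
def CountNeighbors(particle, fieldset, time, neighbors, mutator):
    # Interaction kernels receive the neighborhood of `particle` and register any
    # state changes via `mutator` rather than mutating particles directly.
    n = sum(1 for _ in neighbors)  # number of neighboring particles
    ...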