2 changes: 1 addition & 1 deletion .github/ci/min_deps_check.py
@@ -193,7 +193,7 @@ def main() -> None:
print("\nErrors:")
print("-------")
for i, e in enumerate(errors):
print(f"{i+1}. {e}")
print(f"{i + 1}. {e}")
sys.exit(1)


6 changes: 3 additions & 3 deletions .pre-commit-config.yaml
@@ -10,7 +10,7 @@ repos:
types: [text]
files: \.(json|ipynb)$
- repo: https://github.com/astral-sh/ruff-pre-commit
-rev: v0.8.6
+rev: v0.12.5
hooks:
- id: ruff
name: ruff lint (.py)
@@ -23,13 +23,13 @@ repos:
- id: ruff-format
types_or: [python, jupyter]
- repo: https://github.com/rbubley/mirrors-prettier # Update mirror as official mirror is deprecated
-rev: v3.4.2
+rev: v3.6.2
hooks:
- id: prettier

# Ruff doesn't have full coverage of pydoclint https://github.com/astral-sh/ruff/issues/12434
- repo: https://github.com/PyCQA/flake8
-rev: 7.1.1
+rev: 7.3.0
hooks:
- id: flake8
name: pydoclint
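The version bumps above appear to be what drive the Python reformatting in the rest of this PR. A minimal sketch, with made-up values, of the two patterns visible in the hunks (spaces around binary operators inside f-string replacement fields, and long assert messages moved into their own parentheses); this is illustrative only, not code from the repository:

```python
i, e = 0, "numpy >= 1.24 not satisfied"  # hypothetical values

# previously committed style
print(f"{i+1}. {e}")

# style produced after the formatter bump: spaces inside the replacement field
print(f"{i + 1}. {e}")

# long assert messages now sit in their own parenthesized block
values = [1, 2, 3]
assert len(values) == 3, (
    "expected exactly three values, got a different number"
)
```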
2 changes: 1 addition & 1 deletion docs/examples/tutorial_nemo_3D.ipynb
@@ -121,7 +121,7 @@
"print(\n",
" f\"Level[{int(depth_level)}] depth is: \"\n",
" f\"[{fieldset.W.grid.depth[depth_level]:g} \"\n",
" f\"{fieldset.W.grid.depth[depth_level+1]:g}]\"\n",
" f\"{fieldset.W.grid.depth[depth_level + 1]:g}]\"\n",
")\n",
"\n",
"plt.pcolormesh(\n",
4 changes: 2 additions & 2 deletions docs/examples/tutorial_peninsula_AvsCgrid.ipynb
@@ -242,7 +242,7 @@
" # Set the same limits for all subplots\n",
" ax.set_xlim([fieldset.U.lon.min(), fieldset.U.lon.max()])\n",
" ax.set_ylim([0, 23e3])\n",
" m2km = lambda x, _: f\"{x/1000:.1f}\"\n",
" m2km = lambda x, _: f\"{x / 1000:.1f}\"\n",
" ax.xaxis.set_major_formatter(m2km)\n",
" ax.yaxis.set_major_formatter(m2km)\n",
" ax.set_xlabel(\"x [km]\")\n",
@@ -306,7 +306,7 @@
"ax.set_ylim([0, 23e3])\n",
"ax.set_ylabel(\"y [km]\")\n",
"ax.set_xlabel(\"x [km]\")\n",
"m2km = lambda x, _: f\"{x/1000:.1f}\"\n",
"m2km = lambda x, _: f\"{x / 1000:.1f}\"\n",
"ax.xaxis.set_major_formatter(m2km)\n",
"ax.yaxis.set_major_formatter(m2km)\n",
"\n",
1 change: 0 additions & 1 deletion docs/v4/api.md
@@ -35,7 +35,6 @@ classDiagram
Here, important things to note are:

- Interpolators (which would implement the `Interpolator` protocol) are responsible for the actual interpolation of the data, and performance considerations. There will be interpolation and indexing utilities that can be made available to the interpolators, allowing for code re-use.

- Interpolators of the data should handle spatial periodicity and, for the case of rectilinear structured grids, without pre-computing a halo for the FieldSet and Grid ([issue](https://github.com/OceanParcels/Parcels/issues/1898)).

- In the `Field` class, not all combinations of `data`, `grid`, and `interpolator` will logically make sense (e.g., a `xr.DataArray` on a `ux.Grid`, or `ux.DataArray` on a `parcels.Grid`). It's up to the `Interpolator.assert_is_compatible(Field)` to define what is and is not compatible, and raise `ValueError` / `TypeError` on incompatible data types. The `.assert_is_compatible()` method also acts as developer documentation, defining clearly for the `.interpolate()` method what assumptions it is working on. The `.assert_is_compatible()` method should be lightweight as it will be called on `Field` initialisation.
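A rough sketch of the protocol these notes describe may help. Everything beyond the two method names mentioned above (`assert_is_compatible`, `interpolate`) is an assumption for illustration; the argument lists in particular are not the Parcels v4 API:

```python
from typing import Any, Protocol


class Interpolator(Protocol):
    def assert_is_compatible(self, field: Any) -> None:
        """Raise ValueError/TypeError if the field's data/grid combination is unsupported.

        Kept lightweight, since it is called on Field initialisation.
        """
        ...

    def interpolate(self, field: Any, time: float, z: float, y: float, x: float) -> float:
        """Return the interpolated value, handling spatial periodicity where needed."""
        ...
```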
6 changes: 3 additions & 3 deletions parcels/interaction/interactionkernel.py
@@ -74,9 +74,9 @@ def __init__(

numkernelargs = self.check_kernel_signature_on_version()

-assert numkernelargs[0] == 5 and numkernelargs.count(numkernelargs[0]) == len(
-    numkernelargs
-), "Interactionkernels take exactly 5 arguments: particle, fieldset, time, neighbors, mutator"
+assert numkernelargs[0] == 5 and numkernelargs.count(numkernelargs[0]) == len(numkernelargs), (
+    "Interactionkernels take exactly 5 arguments: particle, fieldset, time, neighbors, mutator"
+)

def check_fieldsets_in_kernels(self, pyfunc):
# Currently, the implemented interaction kernels do not impose
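As an aside, a minimal sketch of the five-argument contract that assert message enforces. The kernel body, the `neighbor_count` attribute, and the mutator append pattern are assumptions for illustration, not code from this PR:

```python
def CountNeighbors(particle, fieldset, time, neighbors, mutator):
    # neighbors: iterable of nearby particles provided by the interaction machinery
    n = sum(1 for _ in neighbors)

    def set_count(p, new_n=n):
        p.neighbor_count = new_n  # assumes a user-defined `neighbor_count` Variable

    # State changes are deferred: collected per particle id and applied afterwards
    mutator[particle.id].append((set_count, ()))
```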
12 changes: 6 additions & 6 deletions parcels/particleset.py
@@ -133,9 +133,9 @@ def __init__(
for kwvar in kwargs:
if kwvar not in ["partition_function"]:
kwargs[kwvar] = convert_to_flat_array(kwargs[kwvar])
-assert (
-    lon.size == kwargs[kwvar].size
-), f"{kwvar} and positions (lon, lat, depth) don't have the same lengths."
+assert lon.size == kwargs[kwvar].size, (
+    f"{kwvar} and positions (lon, lat, depth) don't have the same lengths."
+)

self._data = {
"lon": lon.astype(lonlatdepth_dtype),
@@ -233,9 +233,9 @@ def add(self, particles):
The current ParticleSet

"""
-assert (
-    particles is not None
-), f"Trying to add another {type(self)} to this one, but the other one is None - invalid operation."
+assert particles is not None, (
+    f"Trying to add another {type(self)} to this one, but the other one is None - invalid operation."
+)
assert type(particles) is type(self)

if len(particles) == 0:
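The check reformatted in `__init__` above enforces that every extra per-particle keyword array matches the positions in length. A standalone sketch of that constraint, with hypothetical `age` and `temperature` arrays and no Parcels objects involved:

```python
import numpy as np

lon = np.array([0.1, 0.2, 0.3])
lat = np.array([1.0, 1.1, 1.2])

# Any extra per-particle array passed as a keyword must be the same length
# as lon/lat/depth, otherwise the assert fires.
kwargs = {"age": np.zeros(3), "temperature": np.full(3, 20.0)}

for kwvar, arr in kwargs.items():
    assert lon.size == arr.size, (
        f"{kwvar} and positions (lon, lat, depth) don't have the same lengths."
    )
```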
2 changes: 1 addition & 1 deletion parcels/tools/timer.py
@@ -48,7 +48,7 @@ def print_tree_sequential(self, step=0, root_time=0, parent_time=0):
if step > 0:
print(f"({round(time / parent_time * 100):3d}%) ", end="")
t_str = f"{time:1.3e} s" if root_time < 300 else datetime.timedelta(seconds=time)
print(f"Timer {(self._name).ljust(20 - 2*step + 7*(step == 0))}: {t_str}")
print(f"Timer {(self._name).ljust(20 - 2 * step + 7 * (step == 0))}: {t_str}")
for child in self._children:
child.print_tree_sequential(step + 1, root_time, time)

3 changes: 1 addition & 2 deletions parcels/xgcm/grid.py
@@ -516,8 +516,7 @@ def check_neighbor(link, position):
neighbor_link = face_links[idx][ax][correct_position]
except (KeyError, IndexError):
raise KeyError(
f"Couldn't find a face link for face {idx!r}"
f"in axis {ax!r} at position {correct_position!r}"
f"Couldn't find a face link for face {idx!r}in axis {ax!r} at position {correct_position!r}"
)
idx_n, ax_n, rev_n = neighbor_link
if ax not in self.axes:
1 change: 1 addition & 0 deletions pyproject.toml
@@ -169,6 +169,7 @@ ignore = [
"RUF015",
# Use `X | Y` in `isinstance` (see https://github.com/home-assistant/core/issues/123850)
"UP038",
"RUF046", # Value being cast to `int` is already an integer

# TODO: ignore for now (requires more work). Remove ignore once fixed
# Missing docstring in public module
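A tiny illustration of the pattern RUF046 (added to the ignore list above) flags; the example code is hypothetical:

```python
n = int(len([1, 2, 3]))  # RUF046: `len()` already returns an int, so the cast is redundant
n = len([1, 2, 3])       # equivalent without the cast
```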