diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 206392a999..4c9f72d4b3 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -10,7 +10,7 @@ repos:
         types: [text]
         files: \.(json|ipynb)$
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.7.4
+    rev: v0.8.0
     hooks:
       - id: ruff
         name: ruff lint (.py)
diff --git a/docs/examples/example_dask_chunk_OCMs.py b/docs/examples/example_dask_chunk_OCMs.py
index e60f00c848..c03d9174d7 100644
--- a/docs/examples/example_dask_chunk_OCMs.py
+++ b/docs/examples/example_dask_chunk_OCMs.py
@@ -247,7 +247,7 @@ def test_pop(mode, chunk_mode):
     filenames = str(data_folder / "t.x1_SAMOC_flux.1690*.nc")
     variables = {"U": "UVEL", "V": "VVEL", "W": "WVEL"}
     timestamps = np.expand_dims(
-        np.array([np.datetime64("2000-%.2d-01" % m) for m in range(1, 7)]), axis=1
+        np.array([np.datetime64(f"2000-{m:02d}-01") for m in range(1, 7)]), axis=1
     )
     dimensions = {"lon": "ULON", "lat": "ULAT", "depth": "w_dep"}
     chs = False
diff --git a/docs/examples/example_moving_eddies.py b/docs/examples/example_moving_eddies.py
index 1d238aad49..40f679539a 100644
--- a/docs/examples/example_moving_eddies.py
+++ b/docs/examples/example_moving_eddies.py
@@ -146,7 +146,7 @@ def moving_eddies_example(

     # Execute for 1 week, with 1 hour timesteps and hourly output
     runtime = timedelta(days=7)
-    print("MovingEddies: Advecting %d particles for %s" % (npart, str(runtime)))
+    print(f"MovingEddies: Advecting {npart} particles for {runtime}")
     pset.execute(
         method,
         runtime=runtime,
@@ -177,7 +177,7 @@ def test_moving_eddies_fwdbwd(mode, mesh, tmpdir, npart=2):
     runtime = timedelta(days=1)
     dt = timedelta(minutes=5)
     outputdt = timedelta(hours=1)
-    print("MovingEddies: Advecting %d particles for %s" % (npart, str(runtime)))
+    print(f"MovingEddies: Advecting {npart} particles for {runtime}")
     outfile = tmpdir.join("EddyParticlefwd")
     pset.execute(
         method,
diff --git a/docs/examples/example_peninsula.py b/docs/examples/example_peninsula.py
index dfd65c146c..2ecf888596 100644
--- a/docs/examples/example_peninsula.py
+++ b/docs/examples/example_peninsula.py
@@ -168,7 +168,7 @@ def peninsula_example(
     out = (
         pset.ParticleFile(name=outfile, outputdt=timedelta(hours=1)) if output else None
     )
-    print("Peninsula: Advecting %d particles for %s" % (npart, str(time)))
+    print(f"Peninsula: Advecting {npart} particles for {time}")
     pset.execute(k_adv + k_p, runtime=time, dt=dt, output_file=out)

     if verbose:
diff --git a/docs/examples/example_stommel.py b/docs/examples/example_stommel.py
index 58150eec6c..8fefaded17 100755
--- a/docs/examples/example_stommel.py
+++ b/docs/examples/example_stommel.py
@@ -160,7 +160,7 @@ def stommel_example(
     maxage = runtime.total_seconds() if maxage is None else maxage
     fieldset.add_constant("maxage", maxage)

-    print("Stommel: Advecting %d particles for %s" % (npart, runtime))
+    print(f"Stommel: Advecting {npart} particles for {runtime}")
     parcels.timer.psetinit.stop()
     parcels.timer.psetrun = parcels.timer.Timer("Pset_run", parent=parcels.timer.pset)
     pset.execute(
diff --git a/parcels/application_kernels/EOSseawaterproperties.py b/parcels/application_kernels/EOSseawaterproperties.py
index 167c7111b2..d85b6633a2 100644
--- a/parcels/application_kernels/EOSseawaterproperties.py
+++ b/parcels/application_kernels/EOSseawaterproperties.py
@@ -2,7 +2,7 @@

 import math

-__all__ = ["PressureFromLatDepth", "AdiabticTemperatureGradient", "PtempFromTemp", "TempFromPtemp", "UNESCODensity"]
+__all__ = ["AdiabticTemperatureGradient", "PressureFromLatDepth", "PtempFromTemp", "TempFromPtemp", "UNESCODensity"]


 def PressureFromLatDepth(particle, fieldset, time):
diff --git a/parcels/application_kernels/advection.py b/parcels/application_kernels/advection.py
index 6f1000c994..4ad47ac6d0 100644
--- a/parcels/application_kernels/advection.py
+++ b/parcels/application_kernels/advection.py
@@ -5,12 +5,12 @@
 from parcels.tools.statuscodes import StatusCode

 __all__ = [
-    "AdvectionRK4",
+    "AdvectionAnalytical",
     "AdvectionEE",
-    "AdvectionRK45",
+    "AdvectionRK4",
     "AdvectionRK4_3D",
-    "AdvectionAnalytical",
     "AdvectionRK4_3D_CROCO",
+    "AdvectionRK45",
 ]
diff --git a/parcels/application_kernels/advectiondiffusion.py b/parcels/application_kernels/advectiondiffusion.py
index 35db28342f..fc3c8594ad 100644
--- a/parcels/application_kernels/advectiondiffusion.py
+++ b/parcels/application_kernels/advectiondiffusion.py
@@ -7,7 +7,7 @@

 import parcels

-__all__ = ["DiffusionUniformKh", "AdvectionDiffusionM1", "AdvectionDiffusionEM"]
+__all__ = ["AdvectionDiffusionEM", "AdvectionDiffusionM1", "DiffusionUniformKh"]


 def AdvectionDiffusionM1(particle, fieldset, time):
diff --git a/parcels/application_kernels/interaction.py b/parcels/application_kernels/interaction.py
index cbf9a3ac80..db3c4e04e7 100644
--- a/parcels/application_kernels/interaction.py
+++ b/parcels/application_kernels/interaction.py
@@ -4,7 +4,7 @@

 from parcels.tools.statuscodes import StatusCode

-__all__ = ["AsymmetricAttraction", "NearestNeighborWithinRange", "MergeWithNearestNeighbor"]
+__all__ = ["AsymmetricAttraction", "MergeWithNearestNeighbor", "NearestNeighborWithinRange"]


 def NearestNeighborWithinRange(particle, fieldset, time, neighbors, mutator):
diff --git a/parcels/compilation/codegenerator.py b/parcels/compilation/codegenerator.py
index 33351ac6c3..f4bc558676 100644
--- a/parcels/compilation/codegenerator.py
+++ b/parcels/compilation/codegenerator.py
@@ -226,7 +226,7 @@ def __init__(self, fieldset=None, ptype=JITParticle):

     def get_tmp(self):
         """Create a new temporary variable name."""
-        tmp = "parcels_tmpvar%d" % self._tmp_counter
+        tmp = f"parcels_tmpvar{self._tmp_counter:d}"
         self._tmp_counter += 1
         self.tmp_vars += [tmp]
         return tmp
diff --git a/parcels/field.py b/parcels/field.py
index 17f67eb0da..0871f8595a 100644
--- a/parcels/field.py
+++ b/parcels/field.py
@@ -50,7 +50,7 @@
     from parcels.fieldset import FieldSet


-__all__ = ["Field", "VectorField", "NestedField"]
+__all__ = ["Field", "NestedField", "VectorField"]


 def _isParticle(key):
@@ -1242,7 +1242,7 @@ def _search_indices_curvilinear(self, x, y, z, ti=-1, time=-1, particle=None, se
                 (xi, yi) = self._reconnect_bnd_indices(xi, yi, grid.xdim, grid.ydim, grid.mesh)
             it += 1
             if it > maxIterSearch:
-                print("Correct cell not found after %d iterations" % maxIterSearch)
+                print(f"Correct cell not found after {maxIterSearch} iterations")
                 raise FieldOutOfBoundError(x, y, 0, field=self)
         xsi = max(0.0, xsi)
         eta = max(0.0, eta)
@@ -1630,10 +1630,8 @@ def _chunk_data(self):
         g = self.grid
         if isinstance(self.data, da.core.Array):
             for block_id in range(len(self.grid._load_chunk)):
-                if (
-                    g._load_chunk[block_id] == g._chunk_loading_requested
-                    or g._load_chunk[block_id] in g._chunk_loaded
-                    and self._data_chunks[block_id] is None
+                if g._load_chunk[block_id] == g._chunk_loading_requested or (
+                    g._load_chunk[block_id] in g._chunk_loaded and self._data_chunks[block_id] is None
                 ):
                     block = self._get_block(block_id)
                     self._data_chunks[block_id] = np.array(
@@ -2534,13 +2532,13 @@ def __init__(self, name: str, F, V=None, W=None):
                 assert isinstance(Fi, Field) and isinstance(
                     Vi, Field
                 ), "F, and V components of a NestedField must be Field"
-                self.append(VectorField(name + "_%d" % i, Fi, Vi))
+                self.append(VectorField(f"{name}_{i}", Fi, Vi))
         else:
             for i, Fi, Vi, Wi in zip(range(len(F)), F, V, W, strict=True):
                 assert (
                     isinstance(Fi, Field) and isinstance(Vi, Field) and isinstance(Wi, Field)
                 ), "F, V and W components of a NestedField must be Field"
-                self.append(VectorField(name + "_%d" % i, Fi, Vi, Wi))
+                self.append(VectorField(f"{name}_{i}", Fi, Vi, Wi))
         self.name = name

     def __getitem__(self, key):
diff --git a/parcels/grid.py b/parcels/grid.py
index 382cc0f2b7..5acd295109 100644
--- a/parcels/grid.py
+++ b/parcels/grid.py
@@ -12,14 +12,14 @@
 from parcels.tools.warnings import FieldSetWarning

 __all__ = [
-    "GridType",
-    "GridCode",
-    "RectilinearZGrid",
-    "RectilinearSGrid",
-    "CurvilinearZGrid",
-    "CurvilinearSGrid",
     "CGrid",
+    "CurvilinearSGrid",
+    "CurvilinearZGrid",
     "Grid",
+    "GridCode",
+    "GridType",
+    "RectilinearSGrid",
+    "RectilinearZGrid",
 ]
diff --git a/parcels/interaction/neighborsearch/__init__.py b/parcels/interaction/neighborsearch/__init__.py
index e7e3b21893..9b7175a7e4 100644
--- a/parcels/interaction/neighborsearch/__init__.py
+++ b/parcels/interaction/neighborsearch/__init__.py
@@ -11,9 +11,9 @@
 )

 __all__ = [
-    "HashFlatNeighborSearch",
-    "HashSphericalNeighborSearch",
     "BruteFlatNeighborSearch",
     "BruteSphericalNeighborSearch",
+    "HashFlatNeighborSearch",
+    "HashSphericalNeighborSearch",
     "KDTreeFlatNeighborSearch",
 ]
diff --git a/parcels/kernel.py b/parcels/kernel.py
index a9ab26577e..0ee62b65a1 100644
--- a/parcels/kernel.py
+++ b/parcels/kernel.py
@@ -42,7 +42,7 @@
 )
 from parcels.tools.warnings import KernelWarning

-__all__ = ["Kernel", "BaseKernel"]
+__all__ = ["BaseKernel", "Kernel"]


 class BaseKernel(abc.ABC):
@@ -418,7 +418,7 @@ def get_kernel_compile_files(self):
             dyn_dir = mpi_comm.bcast(dyn_dir, root=0)
             basename = cache_name if mpi_rank == 0 else None
             basename = mpi_comm.bcast(basename, root=0)
-            basename = basename + "_%d" % mpi_rank
+            basename = f"{basename}_{mpi_rank}"
         else:
             cache_name = (
                 self._cache_key
diff --git a/parcels/particle.py b/parcels/particle.py
index 08e46ff08f..d6cd98662d 100644
--- a/parcels/particle.py
+++ b/parcels/particle.py
@@ -6,7 +6,7 @@

 from parcels.tools.statuscodes import StatusCode

-__all__ = ["ScipyParticle", "JITParticle", "Variable", "ScipyInteractionParticle"]
+__all__ = ["JITParticle", "ScipyInteractionParticle", "ScipyParticle", "Variable"]


 indicators_64bit = [np.float64, np.uint64, np.int64, c_void_p]
@@ -201,7 +201,7 @@ def __del__(self):

     def __repr__(self):
         time_string = "not_yet_set" if self.time is None or np.isnan(self.time) else f"{self.time:f}"
-        p_string = "P[%d](lon=%f, lat=%f, depth=%f, " % (self.id, self.lon, self.lat, self.depth)
+        p_string = f"P[{self.id}](lon={self.lon:f}, lat={self.lat:f}, depth={self.depth:f}, "
         for var in vars(type(self)):
             if var in ["lon_nextloop", "lat_nextloop", "depth_nextloop", "time_nextloop"]:
                 continue
diff --git a/parcels/particledata.py b/parcels/particledata.py
index 3eb725d03e..516083e830 100644
--- a/parcels/particledata.py
+++ b/parcels/particledata.py
@@ -460,7 +460,7 @@ def getPType(self):

     def __repr__(self):
         time_string = "not_yet_set" if self.time is None or np.isnan(self.time) else f"{self.time:f}"
-        p_string = "P[%d](lon=%f, lat=%f, depth=%f, " % (self.id, self.lon, self.lat, self.depth)
+        p_string = f"P[{self.id}](lon={self.lon:f}, lat={self.lat:f}, depth={self.depth:f}, "
         for var in self._pcoll.ptype.variables:
             if var.name in [
                 "lon_nextloop",
diff --git a/parcels/particleset.py b/parcels/particleset.py
index 86809e2887..e9c07f9873 100644
--- a/parcels/particleset.py
+++ b/parcels/particleset.py
@@ -1193,7 +1193,7 @@ def execute(
                         raise RuntimeError(
                             "Field writing during execution only works for Fields with one snapshot in time"
                         )
-                    fldfilename = str(output_file.fname).replace(".zarr", "_%.4d" % fld.to_write)
+                    fldfilename = str(output_file.fname).replace(".zarr", f"_{fld.to_write:04d}")
                     fld.write(fldfilename)
                     fld.to_write += 1
diff --git a/parcels/rng.py b/parcels/rng.py
index bac7fbce87..868f86bf2d 100644
--- a/parcels/rng.py
+++ b/parcels/rng.py
@@ -10,7 +10,7 @@
 from parcels.tools import get_cache_dir, get_package_dir
 from parcels.tools.loggers import logger

-__all__ = ["seed", "random", "uniform", "randint", "normalvariate", "expovariate", "vonmisesvariate"]
+__all__ = ["expovariate", "normalvariate", "randint", "random", "seed", "uniform", "vonmisesvariate"]


 class RandomC:
diff --git a/parcels/tools/converters.py b/parcels/tools/converters.py
index 3f323b93c6..08b4e84536 100644
--- a/parcels/tools/converters.py
+++ b/parcels/tools/converters.py
@@ -10,15 +10,15 @@
 import xarray as xr

 __all__ = [
-    "UnitConverter",
     "Geographic",
     "GeographicPolar",
-    "GeographicSquare",
     "GeographicPolarSquare",
-    "unitconverters_map",
+    "GeographicSquare",
     "TimeConverter",
-    "convert_xarray_time_units",
+    "UnitConverter",
     "convert_to_flat_array",
+    "convert_xarray_time_units",
+    "unitconverters_map",
 ]
diff --git a/parcels/tools/global_statics.py b/parcels/tools/global_statics.py
index f18e5d4d46..317848786b 100644
--- a/parcels/tools/global_statics.py
+++ b/parcels/tools/global_statics.py
@@ -15,7 +15,7 @@
     USER_ID = "tmp"


-__all__ = ["cleanup_remove_files", "cleanup_unload_lib", "get_package_dir", "get_cache_dir"]
+__all__ = ["cleanup_remove_files", "cleanup_unload_lib", "get_cache_dir", "get_package_dir"]


 def cleanup_remove_files(lib_file, log_file):
diff --git a/parcels/tools/statuscodes.py b/parcels/tools/statuscodes.py
index 06186d5888..1ab98bed0c 100644
--- a/parcels/tools/statuscodes.py
+++ b/parcels/tools/statuscodes.py
@@ -1,12 +1,12 @@
 """Handling of Errors and particle status codes"""

 __all__ = [
-    "StatusCode",
-    "FieldSamplingError",
+    "AllParcelsErrorCodes",
     "FieldOutOfBoundError",
-    "TimeExtrapolationError",
+    "FieldSamplingError",
     "KernelError",
-    "AllParcelsErrorCodes",
+    "StatusCode",
+    "TimeExtrapolationError",
 ]
diff --git a/parcels/tools/timer.py b/parcels/tools/timer.py
index 311fd7a90d..cd6725fb2f 100644
--- a/parcels/tools/timer.py
+++ b/parcels/tools/timer.py
@@ -43,10 +43,10 @@ def print_tree_sequential(self, step=0, root_time=0, parent_time=0):
         time = self.local_time()
         if step == 0:
             root_time = time
-        print(("(%3d%%)" % round(time / root_time * 100)), end="")
+        print(f"({round(time / root_time * 100):3d}%)", end="")
         print(" " * (step + 1), end="")
         if step > 0:
-            print("(%3d%%) " % round(time / parent_time * 100), end="")
+            print(f"({round(time / parent_time * 100):3d}%) ", end="")
         t_str = f"{time:1.3e} s" if root_time < 300 else datetime.timedelta(seconds=time)
         print(f"Timer {(self._name).ljust(20 - 2*step + 7*(step == 0))}: {t_str}")
         for child in self._children:
@@ -64,6 +64,6 @@ def print_tree(self, step=0, root_time=0, parent_time=0):
         else:
             for iproc in range(mpi_size):
                 if iproc == mpi_rank:
-                    print("Proc %d/%d - Timer tree" % (mpi_rank, mpi_size))
+                    print(f"Proc {mpi_rank}/{mpi_size} - Timer tree")
                     self.print_tree_sequential(step, root_time, parent_time)
                 mpi_comm.Barrier()
diff --git a/tests/test_kernel_language.py b/tests/test_kernel_language.py
index 3785d90a2e..f349775720 100644
--- a/tests/test_kernel_language.py
+++ b/tests/test_kernel_language.py
@@ -276,7 +276,7 @@ def test_print(fieldset_unit_mesh, mode, capfd):
     def kernel(particle, fieldset, time):
         particle.p = 1e-3
         tmp = 5
-        print("%d %f %f" % (particle.id, particle.p, tmp))
+        print(f"{particle.id} {particle.p:f} {tmp:f}")

     pset.execute(kernel, endtime=1.0, dt=1.0, verbose_progress=False)
     out, err = capfd.readouterr()
diff --git a/tests/test_mpirun.py b/tests/test_mpirun.py
index 690e719d86..cb242fb4a9 100644
--- a/tests/test_mpirun.py
+++ b/tests/test_mpirun.py
@@ -19,13 +19,10 @@ def test_mpi_run(tmpdir, repeatdt, maxage, nump):
     outputNoMPI = tmpdir.join("StommelNoMPI.zarr")

     os.system(
-        "mpirun -np 2 python %s -p %d -o %s -r %d -a %d -wf False -cpf True"
-        % (stommel_file, nump, outputMPI_partition_function, repeatdt, maxage)
+        f"mpirun -np 2 python {stommel_file} -p {nump} -o {outputMPI_partition_function} -r {repeatdt} -a {maxage} -wf False -cpf True"
     )
-    os.system(
-        "mpirun -np 2 python %s -p %d -o %s -r %d -a %d -wf False" % (stommel_file, nump, outputMPI, repeatdt, maxage)
-    )
-    os.system("python %s -p %d -o %s -r %d -a %d -wf False" % (stommel_file, nump, outputNoMPI, repeatdt, maxage))
+    os.system(f"mpirun -np 2 python {stommel_file} -p {nump} -o {outputMPI} -r {repeatdt} -a {maxage} -wf False")
+    os.system(f"python {stommel_file} -p {nump} -o {outputNoMPI} -r {repeatdt} -a {maxage} -wf False")

     ds2 = xr.open_zarr(outputNoMPI)