diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs
index 56ae0cbf0..3a1e371d9 100644
--- a/.git-blame-ignore-revs
+++ b/.git-blame-ignore-revs
@@ -6,3 +6,6 @@ e99e0d170a7dc128031b82ea00cdf9e9a0164b03
 
 # Run black on examples
 cbf96c4e1f58fa6348695d7553900ba3f7c3383b
+
+# Run Ruff format on codebase
+19dd7eb1370063f252f94dd26bf313ff71484876
diff --git a/docs/conf.py b/docs/conf.py
index 52eb1bb37..67a252338 100755
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -399,7 +399,7 @@ def linkcode_resolve(domain, info):
         master_doc,
         "parcels.tex",
         "Parcels Documentation",
-        "M. Lange, E. van Sebille",
+        "The Parcels developers",
         "manual",
     ),
 ]
diff --git a/parcels/application_kernels/advection.py b/parcels/application_kernels/advection.py
index 17d89fb13..8c5df40d1 100644
--- a/parcels/application_kernels/advection.py
+++ b/parcels/application_kernels/advection.py
@@ -42,9 +42,9 @@ def AdvectionRK4_3D(particle, fieldset, time):
     lat3 = particle.lat + v3 * particle.dt
     dep3 = particle.depth + w3 * particle.dt
     (u4, v4, w4) = fieldset.UVW[time + particle.dt, dep3, lat3, lon3, particle]
-    particle_dlon += (u1 + 2 * u2 + 2 * u3 + u4) / 6.0 * particle.dt  # noqa
-    particle_dlat += (v1 + 2 * v2 + 2 * v3 + v4) / 6.0 * particle.dt  # noqa
-    particle_ddepth += (w1 + 2 * w2 + 2 * w3 + w4) / 6.0 * particle.dt  # noqa
+    particle_dlon += (u1 + 2 * u2 + 2 * u3 + u4) / 6 * particle.dt  # noqa
+    particle_dlat += (v1 + 2 * v2 + 2 * v3 + v4) / 6 * particle.dt  # noqa
+    particle_ddepth += (w1 + 2 * w2 + 2 * w3 + w4) / 6 * particle.dt  # noqa
 
 
 def AdvectionRK4_3D_CROCO(particle, fieldset, time):
@@ -81,15 +81,15 @@ def AdvectionRK4_3D_CROCO(particle, fieldset, time):
     sig_dep4 = sig_dep + w4 * particle.dt
     dep4 = sig_dep4 * fieldset.H[time, 0, lat4, lon4]
 
-    particle_dlon += (u1 + 2 * u2 + 2 * u3 + u4) / 6.0 * particle.dt  # noqa
-    particle_dlat += (v1 + 2 * v2 + 2 * v3 + v4) / 6.0 * particle.dt  # noqa
+    particle_dlon += (u1 + 2 * u2 + 2 * u3 + u4) / 6 * particle.dt  # noqa
+    particle_dlat += (v1 + 2 * v2 + 2 * v3 + v4) / 6 * particle.dt  # noqa
     particle_ddepth += (  # noqa
         (dep1 - particle.depth) * 2
         + 2 * (dep2 - particle.depth) * 2
         + 2 * (dep3 - particle.depth)
         + dep4
         - particle.depth
-    ) / 6.0
+    ) / 6
 
 
 def AdvectionEE(particle, fieldset, time):
diff --git a/parcels/compilation/codegenerator.py b/parcels/compilation/codegenerator.py
index 606945e0d..d2d4ac707 100644
--- a/parcels/compilation/codegenerator.py
+++ b/parcels/compilation/codegenerator.py
@@ -629,7 +629,7 @@ def visit_Assign(self, node):
             self.array_vars += [node.targets[0].id]
         elif isinstance(node.value, ParticleXiYiZiTiAttributeNode):
             raise RuntimeError(
-                f"Add index of the grid when using particle.{node.value.attr} " f"(e.g. particle.{node.value.attr}[0])."
+                f"Add index of the grid when using particle.{node.value.attr} (e.g. particle.{node.value.attr}[0])."
             )
         else:
             node.ccode = c.Assign(node.targets[0].ccode, node.value.ccode)
diff --git a/parcels/field.py b/parcels/field.py
index a61d669a8..768065a2b 100644
--- a/parcels/field.py
+++ b/parcels/field.py
@@ -1565,7 +1565,7 @@ def write(self, filename, varname=None):
         if varname is None:
             varname = self.name
         # Derive name of 'depth' variable for NEMO convention
-        vname_depth = "depth%s" % self.name.lower()
+        vname_depth = f"depth{self.name.lower()}"
 
         # Create DataArray objects for file I/O
         if self.grid.gtype == GridType.RectilinearZGrid:
diff --git a/parcels/interaction/interactionkernel.py b/parcels/interaction/interactionkernel.py
index 1b9da571e..db4c7f897 100644
--- a/parcels/interaction/interactionkernel.py
+++ b/parcels/interaction/interactionkernel.py
@@ -80,7 +80,7 @@ def __init__(
 
         if self._ptype.uses_jit:
             raise NotImplementedError(
-                "JIT mode is not supported for" " InteractionKernels. Please run your" " simulation in SciPy mode."
+                "JIT mode is not supported for InteractionKernels. Please run your simulation in SciPy mode."
             )
 
         for func in self._pyfunc:
@@ -166,7 +166,7 @@ def cleanup_unload_lib(lib):
 
     def execute_jit(self, pset, endtime, dt):
         raise NotImplementedError(
-            "JIT mode is not supported for" " InteractionKernels. Please run your" " simulation in SciPy mode."
+            "JIT mode is not supported for InteractionKernels. Please run your simulation in SciPy mode."
        )
 
     def execute_python(self, pset, endtime, dt):
diff --git a/parcels/particledata.py b/parcels/particledata.py
index 199a9c08b..1f32cfa26 100644
--- a/parcels/particledata.py
+++ b/parcels/particledata.py
@@ -68,9 +68,9 @@ def __init__(self, pclass, lon, lat, depth, time, lonlatdepth_dtype, pid_orig, n
         assert (
             depth is not None
         ), "particle's initial depth is None - incompatible with the ParticleData class. Invalid state."
-        assert lon.size == lat.size and lon.size == depth.size, "lon, lat, depth don" "t all have the same lenghts."
+        assert lon.size == lat.size and lon.size == depth.size, "lon, lat, depth don't all have the same lenghts."
 
-        assert lon.size == time.size, "time and positions (lon, lat, depth) don" "t have the same lengths."
+        assert lon.size == time.size, "time and positions (lon, lat, depth) don't have the same lengths."
 
         # If a partitioning function for MPI runs has been passed into the
         # particle creation with the "partition_function" kwarg, retrieve it here.
diff --git a/parcels/particlefile.py b/parcels/particlefile.py
index ba0c5113c..6e047130b 100644
--- a/parcels/particlefile.py
+++ b/parcels/particlefile.py
@@ -212,9 +212,10 @@ def write(self, pset, time, indices=None):
             logger.warning("ParticleSet is empty on writing as array at time %g" % time)
             return
 
-        indices_to_write = (
-            pset.particledata._to_write_particles(pset.particledata._data, time) if indices is None else indices
-        )
+        if indices is None:
+            indices_to_write = pset.particledata._to_write_particles(pset.particledata._data, time)
+        else:
+            indices_to_write = indices
 
         if len(indices_to_write) > 0:
             pids = pset.particledata.getvardata("id", indices_to_write)
diff --git a/parcels/particleset.py b/parcels/particleset.py
index 4d1c2ecec..fc4462e14 100644
--- a/parcels/particleset.py
+++ b/parcels/particleset.py
@@ -166,7 +166,7 @@ def ArrayClass_init(self, *args, **kwargs):
             depth = np.ones(lon.size) * mindepth
         else:
             depth = convert_to_flat_array(depth)
-        assert lon.size == lat.size and lon.size == depth.size, "lon, lat, depth don" "t all have the same lenghts"
+        assert lon.size == lat.size and lon.size == depth.size, "lon, lat, depth don't all have the same lenghts"
 
         time = convert_to_flat_array(time)
         time = np.repeat(time, lon.size) if time.size == 1 else time
@@ -176,7 +176,7 @@ def ArrayClass_init(self, *args, **kwargs):
         if time.size > 0 and isinstance(time[0], np.timedelta64) and not self.time_origin:
             raise NotImplementedError("If fieldset.time_origin is not a date, time of a particle must be a double")
         time = np.array([self.time_origin.reltime(t) if _convert_to_reltime(t) else t for t in time])
-        assert lon.size == time.size, "time and positions (lon, lat, depth) don" "t have the same lengths."
+        assert lon.size == time.size, "time and positions (lon, lat, depth) do not have the same lengths."
 
         if lonlatdepth_dtype is None:
             lonlatdepth_dtype = self.lonlatdepth_dtype_from_field_interp_method(fieldset.U)
@@ -244,7 +244,7 @@ def ArrayClass_init(self, *args, **kwargs):
             else:
                 interaction_class = KDTreeFlatNeighborSearch
         else:
-            assert False, "Interaction is only possible on 'flat' and " "'spherical' meshes"
+            assert False, "Interaction is only possible on 'flat' and 'spherical' meshes"
         try:
             if len(interaction_distance) == 2:
                 inter_dist_vert, inter_dist_horiz = interaction_distance
diff --git a/parcels/tools/converters.py b/parcels/tools/converters.py
index b1dea2fd4..c8ccf8003 100644
--- a/parcels/tools/converters.py
+++ b/parcels/tools/converters.py
@@ -281,10 +281,8 @@ def convert_xarray_time_units(ds, time):
         da2 = xr.decode_cf(da2)
     except ValueError:
         raise RuntimeError(
-            "Xarray could not convert the calendar. If you"
-            "re using from_netcdf, "
+            "Xarray could not convert the calendar. If you're using from_netcdf, "
             "try using the timestamps keyword in the construction of your Field. "
-            "See also the tutorial at https://docs.oceanparcels.org/en/latest/"
-            "examples/tutorial_timestamps.html"
+            "See also the tutorial at https://docs.oceanparcels.org/en/latest/examples/tutorial_timestamps.html"
         )
     ds[time] = da2[time]
diff --git a/parcels/tools/exampledata_utils.py b/parcels/tools/exampledata_utils.py
index f1e75dc6c..e3d56618b 100644
--- a/parcels/tools/exampledata_utils.py
+++ b/parcels/tools/exampledata_utils.py
@@ -133,7 +133,7 @@ def download_example_dataset(dataset: str, data_home=None):
     # Dev note: `dataset` is assumed to be a folder name with netcdf files
    if dataset not in example_data_files:
         raise ValueError(
-            f"Dataset {dataset!r} not found. Available datasets are: " ", ".join(example_data_files.keys())
+            f"Dataset {dataset!r} not found. Available datasets are: " + ", ".join(example_data_files.keys())
         )
 
     cache_folder = get_data_home(data_home)
diff --git a/pyproject.toml b/pyproject.toml
index 03a103540..f32521551 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -71,6 +71,7 @@ select = [
     "ICN", # import conventions
     "G", # logging-format
     "RUF", # ruff
+    "ISC001", # single-line-implicit-string-concatenation
 ]
 
 ignore = [
diff --git a/tests/test_kernel_language.py b/tests/test_kernel_language.py
index a71771ad7..19ca2289a 100644
--- a/tests/test_kernel_language.py
+++ b/tests/test_kernel_language.py
@@ -27,7 +27,8 @@
 
 
 def expr_kernel(name, pset, expr):
-    pycode = f"def {name}(particle, fieldset, time):\n" f"    particle.p = {expr}"
+    pycode = (f"def {name}(particle, fieldset, time):\n"
+              f"    particle.p = {expr}")  # fmt: skip
     return Kernel(
         pset.fieldset, pset.particledata.ptype, pyfunc=None, funccode=pycode, funcname=name, funcvars=["particle"]
     )