Merge branch 'master' into croco_3D_velocities
erikvansebille committed Aug 16, 2024
2 parents b9f0827 + fcb5c96 commit 0731ffe
Showing 13 changed files with 29 additions and 25 deletions.
3 changes: 3 additions & 0 deletions .git-blame-ignore-revs
@@ -6,3 +6,6 @@ e99e0d170a7dc128031b82ea00cdf9e9a0164b03

# Run black on examples
cbf96c4e1f58fa6348695d7553900ba3f7c3383b
+
+# Run Ruff format on codebase
+19dd7eb1370063f252f94dd26bf313ff71484876
2 changes: 1 addition & 1 deletion docs/conf.py
@@ -399,7 +399,7 @@ def linkcode_resolve(domain, info):
master_doc,
"parcels.tex",
"Parcels Documentation",
"M. Lange, E. van Sebille",
"The Parcels developers",
"manual",
),
]
12 changes: 6 additions & 6 deletions parcels/application_kernels/advection.py
@@ -42,9 +42,9 @@ def AdvectionRK4_3D(particle, fieldset, time):
lat3 = particle.lat + v3 * particle.dt
dep3 = particle.depth + w3 * particle.dt
(u4, v4, w4) = fieldset.UVW[time + particle.dt, dep3, lat3, lon3, particle]
-particle_dlon += (u1 + 2 * u2 + 2 * u3 + u4) / 6.0 * particle.dt # noqa
-particle_dlat += (v1 + 2 * v2 + 2 * v3 + v4) / 6.0 * particle.dt # noqa
-particle_ddepth += (w1 + 2 * w2 + 2 * w3 + w4) / 6.0 * particle.dt # noqa
+particle_dlon += (u1 + 2 * u2 + 2 * u3 + u4) / 6 * particle.dt # noqa
+particle_dlat += (v1 + 2 * v2 + 2 * v3 + v4) / 6 * particle.dt # noqa
+particle_ddepth += (w1 + 2 * w2 + 2 * w3 + w4) / 6 * particle.dt # noqa


def AdvectionRK4_3D_CROCO(particle, fieldset, time):
@@ -81,15 +81,15 @@ def AdvectionRK4_3D_CROCO(particle, fieldset, time):
sig_dep4 = sig_dep + w4 * particle.dt
dep4 = sig_dep4 * fieldset.H[time, 0, lat4, lon4]

-particle_dlon += (u1 + 2 * u2 + 2 * u3 + u4) / 6.0 * particle.dt # noqa
-particle_dlat += (v1 + 2 * v2 + 2 * v3 + v4) / 6.0 * particle.dt # noqa
+particle_dlon += (u1 + 2 * u2 + 2 * u3 + u4) / 6 * particle.dt # noqa
+particle_dlat += (v1 + 2 * v2 + 2 * v3 + v4) / 6 * particle.dt # noqa
particle_ddepth += ( # noqa
(dep1 - particle.depth) * 2
+ 2 * (dep2 - particle.depth) * 2
+ 2 * (dep3 - particle.depth)
+ dep4
- particle.depth
-) / 6.0
+) / 6


def AdvectionEE(particle, fieldset, time):
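A quick check that the `6.0` → `6` change above is behaviour-preserving: in Python 3, `/` is always true (float) division, so dividing by the integer literal gives the same result. The values below are made up for illustration:

```python
# In Python 3, "/" performs float division regardless of operand types,
# so the RK4 weighted sum divided by 6 equals the same sum divided by 6.0.
u1, u2, u3, u4 = 0.12, 0.15, 0.14, 0.11  # hypothetical velocity samples (m/s)
dt = 60.0  # hypothetical timestep (s)

assert (u1 + 2 * u2 + 2 * u3 + u4) / 6 * dt == (u1 + 2 * u2 + 2 * u3 + u4) / 6.0 * dt
```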
2 changes: 1 addition & 1 deletion parcels/compilation/codegenerator.py
@@ -629,7 +629,7 @@ def visit_Assign(self, node):
self.array_vars += [node.targets[0].id]
elif isinstance(node.value, ParticleXiYiZiTiAttributeNode):
raise RuntimeError(
f"Add index of the grid when using particle.{node.value.attr} " f"(e.g. particle.{node.value.attr}[0])."
f"Add index of the grid when using particle.{node.value.attr} (e.g. particle.{node.value.attr}[0])."
)
else:
node.ccode = c.Assign(node.targets[0].ccode, node.value.ccode)
2 changes: 1 addition & 1 deletion parcels/field.py
@@ -1565,7 +1565,7 @@ def write(self, filename, varname=None):
if varname is None:
varname = self.name
# Derive name of 'depth' variable for NEMO convention
vname_depth = "depth%s" % self.name.lower()
vname_depth = f"depth{self.name.lower()}"

# Create DataArray objects for file I/O
if self.grid.gtype == GridType.RectilinearZGrid:
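The field.py change is a straightforward modernisation: the `%`-interpolation and the f-string build identical strings. A one-line sanity check with a hypothetical field name:

```python
name = "U"  # hypothetical field name
assert "depth%s" % name.lower() == f"depth{name.lower()}"  # both give "depthu"
```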
4 changes: 2 additions & 2 deletions parcels/interaction/interactionkernel.py
@@ -80,7 +80,7 @@ def __init__(

if self._ptype.uses_jit:
raise NotImplementedError(
"JIT mode is not supported for" " InteractionKernels. Please run your" " simulation in SciPy mode."
"JIT mode is not supported for InteractionKernels. Please run your simulation in SciPy mode."
)

for func in self._pyfunc:
@@ -166,7 +166,7 @@ def cleanup_unload_lib(lib):

def execute_jit(self, pset, endtime, dt):
raise NotImplementedError(
"JIT mode is not supported for" " InteractionKernels. Please run your" " simulation in SciPy mode."
"JIT mode is not supported for InteractionKernels. Please run your simulation in SciPy mode."
)

def execute_python(self, pset, endtime, dt):
4 changes: 2 additions & 2 deletions parcels/particledata.py
@@ -68,9 +68,9 @@ def __init__(self, pclass, lon, lat, depth, time, lonlatdepth_dtype, pid_orig, n
assert (
depth is not None
), "particle's initial depth is None - incompatible with the ParticleData class. Invalid state."
-assert lon.size == lat.size and lon.size == depth.size, "lon, lat, depth don" "t all have the same lenghts."
+assert lon.size == lat.size and lon.size == depth.size, "lon, lat, depth don't all have the same lenghts."

-assert lon.size == time.size, "time and positions (lon, lat, depth) don" "t have the same lengths."
+assert lon.size == time.size, "time and positions (lon, lat, depth) don't have the same lengths."

# If a partitioning function for MPI runs has been passed into the
# particle creation with the "partition_function" kwarg, retrieve it here.
7 changes: 4 additions & 3 deletions parcels/particlefile.py
@@ -212,9 +212,10 @@ def write(self, pset, time, indices=None):
logger.warning("ParticleSet is empty on writing as array at time %g" % time)
return

-indices_to_write = (
-    pset.particledata._to_write_particles(pset.particledata._data, time) if indices is None else indices
-)
+if indices is None:
+    indices_to_write = pset.particledata._to_write_particles(pset.particledata._data, time)
+else:
+    indices_to_write = indices

if len(indices_to_write) > 0:
pids = pset.particledata.getvardata("id", indices_to_write)
6 changes: 3 additions & 3 deletions parcels/particleset.py
@@ -166,7 +166,7 @@ def ArrayClass_init(self, *args, **kwargs):
depth = np.ones(lon.size) * mindepth
else:
depth = convert_to_flat_array(depth)
-assert lon.size == lat.size and lon.size == depth.size, "lon, lat, depth don" "t all have the same lenghts"
+assert lon.size == lat.size and lon.size == depth.size, "lon, lat, depth don't all have the same lenghts"

time = convert_to_flat_array(time)
time = np.repeat(time, lon.size) if time.size == 1 else time
@@ -176,7 +176,7 @@ def ArrayClass_init(self, *args, **kwargs):
if time.size > 0 and isinstance(time[0], np.timedelta64) and not self.time_origin:
raise NotImplementedError("If fieldset.time_origin is not a date, time of a particle must be a double")
time = np.array([self.time_origin.reltime(t) if _convert_to_reltime(t) else t for t in time])
-assert lon.size == time.size, "time and positions (lon, lat, depth) don" "t have the same lengths."
+assert lon.size == time.size, "time and positions (lon, lat, depth) do not have the same lengths."

if lonlatdepth_dtype is None:
lonlatdepth_dtype = self.lonlatdepth_dtype_from_field_interp_method(fieldset.U)
@@ -244,7 +244,7 @@ def ArrayClass_init(self, *args, **kwargs):
else:
interaction_class = KDTreeFlatNeighborSearch
else:
-assert False, "Interaction is only possible on 'flat' and " "'spherical' meshes"
+assert False, "Interaction is only possible on 'flat' and 'spherical' meshes"
try:
if len(interaction_distance) == 2:
inter_dist_vert, inter_dist_horiz = interaction_distance
6 changes: 2 additions & 4 deletions parcels/tools/converters.py
@@ -281,10 +281,8 @@ def convert_xarray_time_units(ds, time):
da2 = xr.decode_cf(da2)
except ValueError:
raise RuntimeError(
"Xarray could not convert the calendar. If you"
"re using from_netcdf, "
"Xarray could not convert the calendar. If you're using from_netcdf, "
"try using the timestamps keyword in the construction of your Field. "
"See also the tutorial at https://docs.oceanparcels.org/en/latest/"
"examples/tutorial_timestamps.html"
"See also the tutorial at https://docs.oceanparcels.org/en/latest/examples/tutorial_timestamps.html"
)
ds[time] = da2[time]
2 changes: 1 addition & 1 deletion parcels/tools/exampledata_utils.py
@@ -133,7 +133,7 @@ def download_example_dataset(dataset: str, data_home=None):
# Dev note: `dataset` is assumed to be a folder name with netcdf files
if dataset not in example_data_files:
raise ValueError(
f"Dataset {dataset!r} not found. Available datasets are: " ", ".join(example_data_files.keys())
f"Dataset {dataset!r} not found. Available datasets are: " + ", ".join(example_data_files.keys())
)

cache_folder = get_data_home(data_home)
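The exampledata_utils.py change above fixes a real bug, not just style: adjacent string literals are concatenated by the parser before the `.join` call runs, so the message text ended up as the join separator. A minimal sketch with made-up dataset names:

```python
# Hypothetical dataset names, purely to illustrate the parsing pitfall.
datasets = ["MovingEddies_data", "GlobCurrent_data"]

# Implicit concatenation binds first, i.e. ("... are: " + ", ").join(datasets).
broken = "Available datasets are: " ", ".join(datasets)
fixed = "Available datasets are: " + ", ".join(datasets)

print(broken)  # MovingEddies_dataAvailable datasets are: , GlobCurrent_data
print(fixed)   # Available datasets are: MovingEddies_data, GlobCurrent_data
```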
1 change: 1 addition & 0 deletions pyproject.toml
@@ -71,6 +71,7 @@ select = [
"ICN", # import conventions
"G", # logging-format
"RUF", # ruff
"ISC001", # single-line-implicit-string-concatenation
]

ignore = [
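For context, the newly selected ISC001 rule flags implicitly concatenated string literals on a single line, which is the pattern most of the string changes in this commit remove. A sketch of what the rule catches, using a message similar to the one in interactionkernel.py above:

```python
# Flagged by Ruff's ISC001: the two adjacent literals are silently merged by
# the parser, which is easy to misread and can hide bugs like the one fixed
# in exampledata_utils.py above.
message = "JIT mode is not supported for" " InteractionKernels."

# Preferred: write the merged literal directly.
message = "JIT mode is not supported for InteractionKernels."
```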
3 changes: 2 additions & 1 deletion tests/test_kernel_language.py
@@ -27,7 +27,8 @@


def expr_kernel(name, pset, expr):
pycode = f"def {name}(particle, fieldset, time):\n" f" particle.p = {expr}"
pycode = (f"def {name}(particle, fieldset, time):\n"
f" particle.p = {expr}") # fmt: skip
return Kernel(
pset.fieldset, pset.particledata.ptype, pyfunc=None, funccode=pycode, funcname=name, funcvars=["particle"]
)
