Update zips
Prior behaviour didn't require the zipped iterables to be the same length. PEP 618 in Python 3.10 introduced the strict flag: strict=True raises a ValueError if the iterables aren't the same length.

The strict values here are set based on intuition about which iterables should always match in length. This was flagged by Bugbear in the Ruff checks (B905).
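For reference, a minimal sketch of the behaviour described above (plain Python, not taken from this repository; the variable names are illustrative only):

    # Python >= 3.10 (PEP 618): zip() accepts a `strict` keyword argument.
    values = [1, 2, 3]
    labels = ["a", "b"]

    print(list(zip(values, labels, strict=False)))  # [(1, 'a'), (2, 'b')] -> the extra value is silently dropped

    try:
        list(zip(values, labels, strict=True))
    except ValueError as err:
        print(f"strict=True raised: {err}")  # the iterables differ in length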
VeckoTheGecko committed Aug 15, 2024
1 parent 6ebb549 commit 46636ca
Showing 9 changed files with 28 additions and 26 deletions.
2 changes: 1 addition & 1 deletion parcels/compilation/codegenerator.py
@@ -396,7 +396,7 @@ def visit_Assign(self, node):
if len(t_elts) != len(v_elts):
raise AttributeError("Tuple lengths in assignment do not agree")
node = [ast.Assign() for _ in t_elts]
-for n, t, v in zip(node, t_elts, v_elts):
+for n, t, v in zip(node, t_elts, v_elts, strict=True):
n.targets = [t]
n.value = v
return node
10 changes: 5 additions & 5 deletions parcels/field.py
@@ -569,7 +569,7 @@ def from_netcdf(
# Pre-allocate data before reading files into buffer
data_list = []
ti = 0
-for tslice, fname in zip(grid.timeslices, data_filenames):
+for tslice, fname in zip(grid.timeslices, data_filenames, strict=True):
with _field_fb_class(
fname,
dimensions,
@@ -798,8 +798,8 @@ def calc_cell_edge_sizes(self):

x_conv = GeographicPolar() if self.grid.mesh == "spherical" else UnitConverter()
y_conv = Geographic() if self.grid.mesh == "spherical" else UnitConverter()
-for y, (lat, dy) in enumerate(zip(self.grid.lat, np.gradient(self.grid.lat))):
-for x, (lon, dx) in enumerate(zip(self.grid.lon, np.gradient(self.grid.lon))):
+for y, (lat, dy) in enumerate(zip(self.grid.lat, np.gradient(self.grid.lat), strict=False)):
+for x, (lon, dx) in enumerate(zip(self.grid.lon, np.gradient(self.grid.lon), strict=False)):
self.grid.cell_edge_sizes["x"][y, x] = x_conv.to_source(dx, lon, lat, self.grid.depth[0])
self.grid.cell_edge_sizes["y"][y, x] = y_conv.to_source(dy, lon, lat, self.grid.depth[0])
self.cell_edge_sizes = self.grid.cell_edge_sizes
@@ -2295,13 +2295,13 @@ def __init__(self, name, F, V=None, W=None):
), "Components of a NestedField must be Field or VectorField"
self.append(Fi)
elif W is None:
-for i, Fi, Vi in zip(range(len(F)), F, V):
+for i, Fi, Vi in zip(range(len(F)), F, V, strict=True):
assert isinstance(Fi, Field) and isinstance(
Vi, Field
), "F, and V components of a NestedField must be Field"
self.append(VectorField(name + "_%d" % i, Fi, Vi))
else:
-for i, Fi, Vi, Wi in zip(range(len(F)), F, V, W):
+for i, Fi, Vi, Wi in zip(range(len(F)), F, V, W, strict=True):
assert (
isinstance(Fi, Field) and isinstance(Vi, Field) and isinstance(Wi, Field)
), "F, V and W components of a NestedField must be Field"
2 changes: 1 addition & 1 deletion parcels/fieldset.py
@@ -280,7 +280,7 @@ def check_velocityfields(U, V, W):

if isinstance(self.U, NestedField):
w = self.W if hasattr(self, "W") else [None] * len(self.U)
-for U, V, W in zip(self.U, self.V, w):
+for U, V, W in zip(self.U, self.V, w, strict=True):
check_velocityfields(U, V, W)
else:
W = self.W if hasattr(self, "W") else None
2 changes: 1 addition & 1 deletion parcels/interaction/neighborsearch/basehash.py
@@ -197,4 +197,4 @@ def hash_split(hash_ids, active_idx=None):
unq_items = a_sorted[unq_first]
unq_count = np.diff(np.nonzero(unq_first)[0])
unq_idx = np.split(sort_idx, np.cumsum(unq_count))
-return dict(zip(unq_items, unq_idx))
+return dict(zip(unq_items, unq_idx, strict=True))
2 changes: 1 addition & 1 deletion parcels/kernel.py
@@ -248,7 +248,7 @@ def __init__(
fieldset = self.fieldset
for f in self.vector_field_args.values():
Wname = f.W.ccode_name if f.W else "not_defined"
-for sF_name, sF_component in zip([f.U.ccode_name, f.V.ccode_name, Wname], ["U", "V", "W"]):
+for sF_name, sF_component in zip([f.U.ccode_name, f.V.ccode_name, Wname], ["U", "V", "W"], strict=True):
if sF_name not in self.field_args:
if sF_name != "not_defined":
self.field_args[sF_name] = getattr(f, sF_component)
20 changes: 10 additions & 10 deletions tests/test_advection.py
@@ -352,8 +352,8 @@ def test_stationary_eddy(fieldset_stationary, mode, method, rtol, diffField, npa
pset = ParticleSet(fieldset, pclass=pclass, lon=lon, lat=lat)
pset.execute(kernel[method], dt=dt, endtime=endtime)

-exp_lon = [truth_stationary(x, y, pset[0].time)[0] for x, y in zip(lon, lat)]
-exp_lat = [truth_stationary(x, y, pset[0].time)[1] for x, y in zip(lon, lat)]
+exp_lon = [truth_stationary(x, y, pset[0].time)[0] for x, y in zip(lon, lat, strict=True)]
+exp_lat = [truth_stationary(x, y, pset[0].time)[1] for x, y in zip(lon, lat, strict=True)]
assert np.allclose(pset.lon, exp_lon, rtol=rtol)
assert np.allclose(pset.lat, exp_lat, rtol=rtol)

@@ -380,8 +380,8 @@ def test_stationary_eddy_vertical(mode, npart=1):

pset = ParticleSet(fieldset, pclass=ptype[mode], lon=lon, lat=lat, depth=depth)
pset.execute(AdvectionRK4_3D, dt=dt, endtime=endtime)
-exp_lon = [truth_stationary(x, z, pset[0].time)[0] for x, z in zip(lon, depth)]
-exp_depth = [truth_stationary(x, z, pset[0].time)[1] for x, z in zip(lon, depth)]
+exp_lon = [truth_stationary(x, z, pset[0].time)[0] for x, z in zip(lon, depth, strict=True)]
+exp_depth = [truth_stationary(x, z, pset[0].time)[1] for x, z in zip(lon, depth, strict=True)]
print(pset, exp_lon)
assert np.allclose(pset.lon, exp_lon, rtol=1e-5)
assert np.allclose(pset.lat, lat, rtol=1e-5)
@@ -392,8 +392,8 @@ def test_stationary_eddy_vertical(mode, npart=1):

pset = ParticleSet(fieldset, pclass=ptype[mode], lon=lon, lat=lat, depth=depth)
pset.execute(AdvectionRK4_3D, dt=dt, endtime=endtime)
-exp_depth = [truth_stationary(z, y, pset[0].time)[0] for z, y in zip(depth, lat)]
-exp_lat = [truth_stationary(z, y, pset[0].time)[1] for z, y in zip(depth, lat)]
+exp_depth = [truth_stationary(z, y, pset[0].time)[0] for z, y in zip(depth, lat, strict=True)]
+exp_lat = [truth_stationary(z, y, pset[0].time)[1] for z, y in zip(depth, lat, strict=True)]
assert np.allclose(pset.lon, lon, rtol=1e-5)
assert np.allclose(pset.lat, exp_lat, rtol=1e-5)
assert np.allclose(pset.depth, exp_depth, rtol=1e-5)
@@ -453,8 +453,8 @@ def test_moving_eddy(fieldset_moving, mode, method, rtol, diffField, npart=1):
pset = ParticleSet(fieldset, pclass=pclass, lon=lon, lat=lat)
pset.execute(kernel[method], dt=dt, endtime=endtime)

-exp_lon = [truth_moving(x, y, t)[0] for x, y, t in zip(lon, lat, pset.time)]
-exp_lat = [truth_moving(x, y, t)[1] for x, y, t in zip(lon, lat, pset.time)]
+exp_lon = [truth_moving(x, y, t)[0] for x, y, t in zip(lon, lat, pset.time, strict=True)]
+exp_lat = [truth_moving(x, y, t)[1] for x, y, t in zip(lon, lat, pset.time, strict=True)]
assert np.allclose(pset.lon, exp_lon, rtol=rtol)
assert np.allclose(pset.lat, exp_lat, rtol=rtol)

@@ -531,8 +531,8 @@ def test_decaying_eddy(fieldset_decaying, mode, method, rtol, diffField, npart=1
pset = ParticleSet(fieldset, pclass=pclass, lon=lon, lat=lat)
pset.execute(kernel[method], dt=dt, endtime=endtime)

-exp_lon = [truth_decaying(x, y, t)[0] for x, y, t in zip(lon, lat, pset.time)]
-exp_lat = [truth_decaying(x, y, t)[1] for x, y, t in zip(lon, lat, pset.time)]
+exp_lon = [truth_decaying(x, y, t)[0] for x, y, t in zip(lon, lat, pset.time, strict=True)]
+exp_lat = [truth_decaying(x, y, t)[1] for x, y, t in zip(lon, lat, pset.time, strict=True)]
assert np.allclose(pset.lon, exp_lon, rtol=rtol)
assert np.allclose(pset.lat, exp_lat, rtol=rtol)

4 changes: 3 additions & 1 deletion tests/test_fieldset.py
@@ -156,7 +156,7 @@ def test_field_from_netcdf_variables():
f3 = Field.from_netcdf(filename, variable, dims)


-@pytest.mark.parametrize("calendar, cftime_datetime", zip(_get_cftime_calendars(), _get_cftime_datetimes()))
+@pytest.mark.parametrize(
+"calendar, cftime_datetime", zip(_get_cftime_calendars(), _get_cftime_datetimes(), strict=True)
+)
def test_fieldset_nonstandardtime(
calendar, cftime_datetime, tmpdir, filename="test_nonstandardtime.nc", xdim=4, ydim=6
):
8 changes: 4 additions & 4 deletions tests/test_particlefile.py
@@ -322,17 +322,17 @@ def SampleP(particle, fieldset, time):
assert (pxi0[p, 0] == 0) and (pxi0[p, -1] == pset[p].pxi0) # check that particle has moved
assert np.all(pxi1[p, :6] == 0) # check that particle has not been sampled on grid 1 until time 6
assert np.all(pxi1[p, 6:] > 0) # check that particle has not been sampled on grid 1 after time 6
-for xi, lon in zip(pxi0[p, 1:], lons[p, 1:]):
+for xi, lon in zip(pxi0[p, 1:], lons[p, 1:], strict=True):
assert fieldset.U.grid.lon[xi] <= lon < fieldset.U.grid.lon[xi + 1]
-for xi, lon in zip(pxi1[p, 6:], lons[p, 6:]):
+for xi, lon in zip(pxi1[p, 6:], lons[p, 6:], strict=True):
assert fieldset.P.grid.lon[xi] <= lon < fieldset.P.grid.lon[xi + 1]
-for yi, lat in zip(pyi[p, 1:], lats[p, 1:]):
+for yi, lat in zip(pyi[p, 1:], lats[p, 1:], strict=True):
assert fieldset.U.grid.lat[yi] <= lat < fieldset.U.grid.lat[yi + 1]
ds.close()


def test_set_calendar():
-for _calendar_name, cf_datetime in zip(_get_cftime_calendars(), _get_cftime_datetimes()):
+for _calendar_name, cf_datetime in zip(_get_cftime_calendars(), _get_cftime_datetimes(), strict=True):
date = getattr(cftime, cf_datetime)(1990, 1, 1)
assert _set_calendar(date.calendar) == date.calendar
assert _set_calendar("np_datetime64") == "standard"
4 changes: 2 additions & 2 deletions tests/test_particlesets.py
@@ -323,7 +323,7 @@ def test_pset_remove_index(fieldset, mode, npart=100):
lon = np.linspace(0, 1, npart)
lat = np.linspace(1, 0, npart)
pset = ParticleSet(fieldset, lon=lon, lat=lat, pclass=ptype[mode], lonlatdepth_dtype=np.float64)
-for ilon, ilat in zip(lon[::-1], lat[::-1]):
+for ilon, ilat in zip(lon[::-1], lat[::-1], strict=True):
assert pset[-1].lon == ilon
assert pset[-1].lat == ilat
pset.remove_indices(-1)
@@ -336,7 +336,7 @@ def test_pset_remove_particle(fieldset, mode, npart=100):
lon = np.linspace(0, 1, npart)
lat = np.linspace(1, 0, npart)
pset = ParticleSet(fieldset, lon=lon, lat=lat, pclass=ptype[mode])
-for ilon, ilat in zip(lon[::-1], lat[::-1]):
+for ilon, ilat in zip(lon[::-1], lat[::-1], strict=True):
assert pset.lon[-1] == ilon
assert pset.lat[-1] == ilat
pset.remove_indices(pset[-1])
