Skip to content
18 changes: 9 additions & 9 deletions src/parcels/_core/kernel.py
Original file line number Diff line number Diff line change
Expand Up @@ -80,8 +80,7 @@ def __init__(

self._kernels: list[Callable] = kernels

if pset._requires_prepended_positionupdate_kernel:
self.prepend_positionupdate_kernel()
self._position_update = self._make_position_update()
Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Why not directly call self._make_position_update()? Why (effectively) rename the function here?

Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

That would be much simpler than what I concocted.


@property #! Ported from v3. To be removed in v4? (/find another way to name kernels in output file)
def funcname(self):
Expand All @@ -108,7 +107,7 @@ def remove_deleted(self, pset):
if len(indices) > 0:
pset.remove_indices(indices)

def prepend_positionupdate_kernel(self):
def _make_position_update(self):
# Adding kernels that set and update the coordinate changes
def PositionUpdate(particles, fieldset): # pragma: no cover
particles.lon += particles.dlon
Expand All @@ -124,7 +123,7 @@ def PositionUpdate(particles, fieldset): # pragma: no cover
# Update dt in case it's increased in RK45 kernel
particles.dt = particles.next_dt

self._kernels = [PositionUpdate] + self._kernels
return PositionUpdate

def check_fieldsets_in_kernels(self, kernel): # TODO v4: this can go into another method? assert_is_compatible()?
"""
Expand Down Expand Up @@ -221,6 +220,12 @@ def execute(self, pset, endtime, dt):
f(pset[repeat_particles], self._fieldset)
repeat_particles = pset.state == StatusCode.Repeat

# apply position/time update only to particles still in a normal state
# (particles that signalled Stop*/Delete/errors should not have time/position advanced)
Comment on lines +223 to +224
Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

So was this the fix to the bug in #2568? As simple as that?

Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The fix was really to have explicit initial condition IO and file output after each call to positionupdate. The previous logic, which pre-pended positionupdate after the first call to kernel.execute() inadvertently wrote the initial condition twice, once for the initial time and once for the initial time + dt; this introduced the "off-by-dt" error in the particlefile output.

update_particles = evaluate_particles & np.isin(pset.state, [StatusCode.Evaluate, StatusCode.Success])
if np.any(update_particles):
self._position_update(pset[update_particles], self._fieldset)

# revert to original dt (unless in RK45 mode)
if not hasattr(self.fieldset, "RK45_tol"):
pset._data["dt"][:] = dt
Expand All @@ -244,9 +249,4 @@ def execute(self, pset, endtime, dt):
else:
error_func(pset[inds].z, pset[inds].lat, pset[inds].lon)

# Only prepend PositionUpdate kernel at the end of the first execute call to avoid adding dt to time too early
if not pset._requires_prepended_positionupdate_kernel:
self.prepend_positionupdate_kernel()
pset._requires_prepended_positionupdate_kernel = True

return pset
8 changes: 6 additions & 2 deletions src/parcels/_core/particleset.py
Original file line number Diff line number Diff line change
Expand Up @@ -130,7 +130,6 @@ def __init__(
self._data[kwvar][:] = kwval

self._kernel = None
self._requires_prepended_positionupdate_kernel = False

def __del__(self):
if self._data is not None and isinstance(self._data, xr.Dataset):
Expand Down Expand Up @@ -422,7 +421,12 @@ def execute(
pbar = tqdm(total=end_time - start_time, file=sys.stdout)
pbar.set_description("Integration time: " + str(start_time))

next_output = start_time if output_file else None
next_output = None
if output_file:
# Write the initial condition
output_file.write(self, start_time)
# Increment the next_output
next_output = start_time + outputdt

time = start_time
while sign_dt * (time - end_time) < 0:
Expand Down
57 changes: 57 additions & 0 deletions src/parcels/_datasets/unstructured/generic.py
Original file line number Diff line number Diff line change
Expand Up @@ -405,9 +405,66 @@ def _icon_square_delaunay_uniform_z_coordinate():
return ux.UxDataset({"U": u, "V": v, "W": w, "p": p}, uxgrid=uxgrid)


def _ux_constant_flow_face_centered_2D():
    """Build a small 2D unstructured dataset with a constant zonal flow on face centers.

    A 10x10 regional Delaunay mesh spanning [0, 20] x [0, 20] degrees carries a
    uniform eastward velocity (``U0`` degrees/s) on face centers and a zero
    vertical velocity on nodes, for two hourly time snapshots. The analytic
    trajectory of any advected particle is a straight eastward line, which makes
    this dataset useful for validating advection kernels.

    Returns
    -------
    ux.UxDataset
        Dataset with face-centered ``U``/``V`` and node-centered ``W``.
    """
    NX = 10  # grid points per side of the regular lon/lat point cloud
    NT = 2  # number of time snapshots
    lon, lat = np.meshgrid(
        np.linspace(0, 20, NX, dtype=np.float64),
        np.linspace(0, 20, NX, dtype=np.float64),
    )
    lon_flat, lat_flat = lon.ravel(), lat.ravel()
    # Points on the outer rim of the square pin the regional triangulation boundary
    mask = np.isclose(lon_flat, 0) | np.isclose(lon_flat, 20) | np.isclose(lat_flat, 0) | np.isclose(lat_flat, 20)
    uxgrid = ux.Grid.from_points(
        (lon_flat, lat_flat),
        method="regional_delaunay",
        boundary_points=np.flatnonzero(mask),
    )
    uxgrid.attrs["Conventions"] = "UGRID-1.0"

    # --- Uniform velocity field on face centers ---
    U0 = 0.001  # degrees/s
    V0 = 0.0
    TIME = xr.date_range("2000-01-01", periods=NT, freq="1h")
    zf = np.array([0.0, 1.0])  # layer interfaces (W lives here)
    zc = np.array([0.5])  # layer centers (U/V live here)

    U = np.full((NT, 1, uxgrid.n_face), U0)
    V = np.full((NT, 1, uxgrid.n_face), V0)
    W = np.zeros((NT, 2, uxgrid.n_node))

    ds = ux.UxDataset(
        {
            "U": ux.UxDataArray(
                U,
                uxgrid=uxgrid,
                dims=["time", "zc", "n_face"],
                coords=dict(time=(["time"], TIME), zc=(["zc"], zc)),
                attrs=dict(location="face", mesh="delaunay", Conventions="UGRID-1.0"),
            ),
            "V": ux.UxDataArray(
                V,
                uxgrid=uxgrid,
                dims=["time", "zc", "n_face"],
                coords=dict(time=(["time"], TIME), zc=(["zc"], zc)),
                attrs=dict(location="face", mesh="delaunay", Conventions="UGRID-1.0"),
            ),
            "W": ux.UxDataArray(
                W,
                uxgrid=uxgrid,
                dims=["time", "zf", "n_node"],
                # FIX: the vertical coordinate was previously named "nz" while the
                # dimension is "zf"; name the coordinate after its dimension, to be
                # consistent with how U/V declare their "zc" coordinate above.
                coords=dict(time=(["time"], TIME), zf=(["zf"], zf)),
                attrs=dict(location="node", mesh="delaunay", Conventions="UGRID-1.0"),
            ),
        },
        uxgrid=uxgrid,
    )
    return ds


# Registry of generic unstructured test datasets, keyed by a descriptive name.
# Each value is built eagerly at import time by the corresponding private
# builder function above.
datasets = {
    "stommel_gyre_delaunay": _stommel_gyre_delaunay(),
    "fesom2_square_delaunay_uniform_z_coordinate": _fesom2_square_delaunay_uniform_z_coordinate(),
    "fesom2_square_delaunay_antimeridian": _fesom2_square_delaunay_antimeridian(),
    "icon_square_delaunay_uniform_z_coordinate": _icon_square_delaunay_uniform_z_coordinate(),
    "ux_constant_flow_face_centered_2D": _ux_constant_flow_face_centered_2D(),
}
2 changes: 1 addition & 1 deletion tests/test_advection.py
Original file line number Diff line number Diff line change
Expand Up @@ -98,7 +98,7 @@ def test_advection_zonal_periodic():
startlon = np.array([0.5, 0.4])
pset = ParticleSet(fieldset, pclass=PeriodicParticle, lon=startlon, lat=[0.5, 0.5])
pset.execute([AdvectionEE, periodicBC], runtime=np.timedelta64(40, "s"), dt=np.timedelta64(1, "s"))
np.testing.assert_allclose(pset.total_dlon, 4.1, atol=1e-5)
np.testing.assert_allclose(pset.total_dlon, 4.0, atol=1e-5)
Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Nice to see that you also fixed all these 'bugs'!

Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

With the extra update call that was there internally, there was a ton of these assertions that were right for the wrong reasons :)

np.testing.assert_allclose(pset.lon, startlon, atol=1e-5)
np.testing.assert_allclose(pset.lat, 0.5, atol=1e-5)

Expand Down
2 changes: 1 addition & 1 deletion tests/test_particleset.py
Original file line number Diff line number Diff line change
Expand Up @@ -125,7 +125,7 @@ def Addlon(particles, fieldset): # pragma: no cover
particles.dlon += particles.dt

pset.execute(Addlon, dt=np.timedelta64(2, "s"), runtime=np.timedelta64(8, "s"), verbose_progress=False)
assert np.allclose([p.lon + p.dlon for p in pset], [10 - t for t in times])
assert np.allclose([p.lon + p.dlon for p in pset], [8 - t for t in times])


def test_populate_indices(fieldset):
Expand Down
10 changes: 5 additions & 5 deletions tests/test_particleset_execute.py
Original file line number Diff line number Diff line change
Expand Up @@ -203,7 +203,7 @@ def SampleU(particles, fieldset): # pragma: no cover
pset = ParticleSet(fieldset, lon=fieldset.U.grid.lon[-1], lat=fieldset.U.grid.lat[-1] + dlat)

with pytest.raises(FieldOutOfBoundError):
pset.execute(SampleU, runtime=np.timedelta64(1, "D"), dt=np.timedelta64(1, "D"))
pset.execute(SampleU, runtime=np.timedelta64(2, "D"), dt=np.timedelta64(1, "D"))


@pytest.mark.parametrize(
Expand All @@ -216,7 +216,7 @@ def AddDt(particles, fieldset): # pragma: no cover
pclass = Particle.add_variable(Variable("added_dt", dtype=np.float32, initial=0))
pset = ParticleSet(fieldset, pclass=pclass, lon=0, lat=0)
pset.execute(AddDt, runtime=dt * 10, dt=dt)
np.testing.assert_allclose(pset[0].added_dt, 11.0 * timedelta_to_float(dt), atol=1e-5)
np.testing.assert_allclose(pset[0].added_dt, 10.0 * timedelta_to_float(dt), atol=1e-5)


def test_pset_remove_particle_in_kernel(fieldset):
Expand Down Expand Up @@ -286,7 +286,7 @@ def AddLon(particles, fieldset): # pragma: no cover
pset = ParticleSet(fieldset, lon=np.zeros(len(start_times)), lat=np.zeros(len(start_times)), time=start_times)
pset.execute(AddLon, dt=np.timedelta64(1, "s"), endtime=endtime)

np.testing.assert_array_equal(pset.lon, [9, 7, 0])
np.testing.assert_array_equal(pset.lon, [8, 6, 0])
assert pset.time[0:1] == timedelta_to_float(endtime - fieldset.time_interval.left)
assert pset.time[2] == timedelta_to_float(
start_times[2] - fieldset.time_interval.left
Expand All @@ -299,7 +299,7 @@ def AddLon(particles, fieldset): # pragma: no cover
pset = ParticleSet(fieldset, lon=np.zeros(len(start_times)), lat=np.zeros(len(start_times)), time=start_times)
pset.execute(AddLon, dt=-np.timedelta64(1, "s"), endtime=endtime)

np.testing.assert_array_equal(pset.lon, [9, 7, 0])
np.testing.assert_array_equal(pset.lon, [8, 6, 0])
assert pset.time[0:1] == timedelta_to_float(endtime - fieldset.time_interval.left)
assert pset.time[2] == timedelta_to_float(
start_times[2] - fieldset.time_interval.left
Expand Down Expand Up @@ -411,7 +411,7 @@ def KernelCounter(particles, fieldset): # pragma: no cover

pset = ParticleSet(fieldset, lon=np.zeros(1), lat=np.zeros(1))
pset.execute(KernelCounter, dt=np.timedelta64(2, "s"), runtime=np.timedelta64(5, "s"))
assert pset.lon == 4
assert pset.lon == 3
assert pset.dt == 2
assert pset.time == 5

Expand Down
16 changes: 8 additions & 8 deletions tests/test_particlesetview.py
Original file line number Diff line number Diff line change
Expand Up @@ -77,11 +77,11 @@ def ConditionalHeating(particles, fieldset): # pragma: no cover

pset.execute(ConditionalHeating, runtime=np.timedelta64(4, "s"), dt=np.timedelta64(1, "s"))

# After 5 timesteps (0, 1, 2, 3, 4): left particles should be at 15.0, right at 7.5
# After 4 timesteps: left particles should be at 14.0, right at 8.0
left_particles = pset.lon < 0.5
right_particles = pset.lon >= 0.5
assert np.allclose(pset.temp[left_particles], 15.0, atol=1e-6)
assert np.allclose(pset.temp[right_particles], 7.5, atol=1e-6)
assert np.allclose(pset.temp[left_particles], 14.0, atol=1e-6)
assert np.allclose(pset.temp[right_particles], 8.0, atol=1e-6)


def test_particle_mask_progressive_changes(fieldset):
Expand Down Expand Up @@ -134,9 +134,9 @@ def MultiMaskOperations(particles, fieldset): # pragma: no cover
group2 = (pset.lon >= 0.33) & (pset.lon < 0.67)
group3 = pset.lon >= 0.67

assert np.allclose(pset.counter[group1], 6, atol=1e-6) # 6 timesteps * 1
assert np.allclose(pset.counter[group2], 12, atol=1e-6) # 6 timesteps * 2
assert np.allclose(pset.counter[group3], 18, atol=1e-6) # 6 timesteps * 3
assert np.allclose(pset.counter[group1], 5, atol=1e-6) # 5 timesteps * 1
assert np.allclose(pset.counter[group2], 10, atol=1e-6) # 5 timesteps * 2
assert np.allclose(pset.counter[group3], 15, atol=1e-6) # 5 timesteps * 3


def test_particle_mask_empty_mask_handling(fieldset):
Expand Down Expand Up @@ -178,5 +178,5 @@ def MoveLon(particles, fieldset): # pragma: no cover

# Should have deleted edge particles
assert pset.size < initial_size
# Remaining particles should be in the middle range
assert np.all((pset.lon >= 0.2) & (pset.lon <= 0.8))
# Remaining particles should be in the middle range (within 0.02 of total displacement)
assert np.all((pset.lon >= 0.2) & (pset.lon <= 0.82))
25 changes: 25 additions & 0 deletions tests/test_uxadvection.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
import numpy as np
import pytest

import parcels
from parcels._datasets.unstructured.generic import datasets as datasets_unstructured
from parcels.kernels import (
AdvectionEE,
AdvectionRK2,
AdvectionRK4,
)


@pytest.mark.parametrize("integrator", [AdvectionEE, AdvectionRK2, AdvectionRK4])
def test_ux_constant_flow_face_centered_2D(integrator, tmp_path):
    """Advect a single particle in a spatially uniform zonal flow on an unstructured mesh.

    The dataset carries U = 0.001 deg/s everywhere, so after 3600 s the particle
    must have moved exactly 3.6 degrees east regardless of the integrator used.
    """
    ds = datasets_unstructured["ux_constant_flow_face_centered_2D"]
    u0 = 0.001  # zonal velocity of the dataset, in degrees/s
    runtime_s = 3600.0
    T = np.timedelta64(int(runtime_s), "s")
    dt = np.timedelta64(300, "s")

    fieldset = parcels.FieldSet.from_ugrid_conventions(ds, mesh="flat")
    pset = parcels.ParticleSet(fieldset, lon=[5.0], lat=[5.0])
    # FIX: write output under pytest's per-test tmp dir instead of the CWD, so
    # test runs don't leave a test.zarr in the repo and parallel runs don't collide.
    pfile = parcels.ParticleFile(store=str(tmp_path / "test.zarr"), outputdt=dt)
    pset.execute(integrator, runtime=T, dt=dt, output_file=pfile, verbose_progress=False)
    # Analytic solution of the constant flow: lon(T) = lon(0) + U0 * T = 8.6 deg
    expected_lon = 5.0 + u0 * runtime_s
    np.testing.assert_allclose(pset.lon, expected_lon, atol=1e-5)
Comment on lines +22 to +25
Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

But the last locations in the output are not tested here. Also test that these are as expected?

Loading