diff --git a/src/parcels/_core/kernel.py b/src/parcels/_core/kernel.py index 60a97d170..a36f788a4 100644 --- a/src/parcels/_core/kernel.py +++ b/src/parcels/_core/kernel.py @@ -80,8 +80,7 @@ def __init__( self._kernels: list[Callable] = kernels - if pset._requires_prepended_positionupdate_kernel: - self.prepend_positionupdate_kernel() + self._position_update = self._make_position_update() @property #! Ported from v3. To be removed in v4? (/find another way to name kernels in output file) def funcname(self): @@ -108,7 +107,7 @@ def remove_deleted(self, pset): if len(indices) > 0: pset.remove_indices(indices) - def prepend_positionupdate_kernel(self): + def _make_position_update(self): # Adding kernels that set and update the coordinate changes def PositionUpdate(particles, fieldset): # pragma: no cover particles.lon += particles.dlon @@ -124,7 +123,7 @@ def PositionUpdate(particles, fieldset): # pragma: no cover # Update dt in case it's increased in RK45 kernel particles.dt = particles.next_dt - self._kernels = [PositionUpdate] + self._kernels + return PositionUpdate def check_fieldsets_in_kernels(self, kernel): # TODO v4: this can go into another method? assert_is_compatible()? 
""" @@ -221,6 +220,12 @@ def execute(self, pset, endtime, dt): f(pset[repeat_particles], self._fieldset) repeat_particles = pset.state == StatusCode.Repeat + # apply position/time update only to particles still in a normal state + # (particles that signalled Stop*/Delete/errors should not have time/position advanced) + update_particles = evaluate_particles & np.isin(pset.state, [StatusCode.Evaluate, StatusCode.Success]) + if np.any(update_particles): + self._position_update(pset[update_particles], self._fieldset) + # revert to original dt (unless in RK45 mode) if not hasattr(self.fieldset, "RK45_tol"): pset._data["dt"][:] = dt @@ -244,9 +249,4 @@ def execute(self, pset, endtime, dt): else: error_func(pset[inds].z, pset[inds].lat, pset[inds].lon) - # Only prepend PositionUpdate kernel at the end of the first execute call to avoid adding dt to time too early - if not pset._requires_prepended_positionupdate_kernel: - self.prepend_positionupdate_kernel() - pset._requires_prepended_positionupdate_kernel = True - return pset diff --git a/src/parcels/_core/particleset.py b/src/parcels/_core/particleset.py index 67e3c5715..06f2732ce 100644 --- a/src/parcels/_core/particleset.py +++ b/src/parcels/_core/particleset.py @@ -130,7 +130,6 @@ def __init__( self._data[kwvar][:] = kwval self._kernel = None - self._requires_prepended_positionupdate_kernel = False def __del__(self): if self._data is not None and isinstance(self._data, xr.Dataset): @@ -422,7 +421,12 @@ def execute( pbar = tqdm(total=end_time - start_time, file=sys.stdout) pbar.set_description("Integration time: " + str(start_time)) - next_output = start_time if output_file else None + next_output = None + if output_file: + # Write the initial condition + output_file.write(self, start_time) + # Increment the next_output + next_output = start_time + outputdt time = start_time while sign_dt * (time - end_time) < 0: diff --git a/src/parcels/_datasets/unstructured/generic.py b/src/parcels/_datasets/unstructured/generic.py 
index f10ccfeb7..064a35b72 100644 --- a/src/parcels/_datasets/unstructured/generic.py +++ b/src/parcels/_datasets/unstructured/generic.py @@ -405,9 +405,66 @@ def _icon_square_delaunay_uniform_z_coordinate(): return ux.UxDataset({"U": u, "V": v, "W": w, "p": p}, uxgrid=uxgrid) +def _ux_constant_flow_face_centered_2D(): + NX = 10 + NT = 2 + lon, lat = np.meshgrid( + np.linspace(0, 20, NX, dtype=np.float64), + np.linspace(0, 20, NX, dtype=np.float64), + ) + lon_flat, lat_flat = lon.ravel(), lat.ravel() + mask = np.isclose(lon_flat, 0) | np.isclose(lon_flat, 20) | np.isclose(lat_flat, 0) | np.isclose(lat_flat, 20) + uxgrid = ux.Grid.from_points( + (lon_flat, lat_flat), + method="regional_delaunay", + boundary_points=np.flatnonzero(mask), + ) + uxgrid.attrs["Conventions"] = "UGRID-1.0" + + # --- Uniform velocity field on face centers --- + U0 = 0.001 # degrees/s + V0 = 0.0 + TIME = xr.date_range("2000-01-01", periods=NT, freq="1h") + zf = np.array([0.0, 1.0]) + zc = np.array([0.5]) + + U = np.full((NT, 1, uxgrid.n_face), U0) + V = np.full((NT, 1, uxgrid.n_face), V0) + W = np.zeros((NT, 2, uxgrid.n_node)) + + ds = ux.UxDataset( + { + "U": ux.UxDataArray( + U, + uxgrid=uxgrid, + dims=["time", "zc", "n_face"], + coords=dict(time=(["time"], TIME), zc=(["zc"], zc)), + attrs=dict(location="face", mesh="delaunay", Conventions="UGRID-1.0"), + ), + "V": ux.UxDataArray( + V, + uxgrid=uxgrid, + dims=["time", "zc", "n_face"], + coords=dict(time=(["time"], TIME), zc=(["zc"], zc)), + attrs=dict(location="face", mesh="delaunay", Conventions="UGRID-1.0"), + ), + "W": ux.UxDataArray( + W, + uxgrid=uxgrid, + dims=["time", "zf", "n_node"], + coords=dict(time=(["time"], TIME), zf=(["zf"], zf)), + attrs=dict(location="node", mesh="delaunay", Conventions="UGRID-1.0"), + ), + }, + uxgrid=uxgrid, + ) + return ds + + datasets = { "stommel_gyre_delaunay": _stommel_gyre_delaunay(), "fesom2_square_delaunay_uniform_z_coordinate": _fesom2_square_delaunay_uniform_z_coordinate(),
"fesom2_square_delaunay_antimeridian": _fesom2_square_delaunay_antimeridian(), "icon_square_delaunay_uniform_z_coordinate": _icon_square_delaunay_uniform_z_coordinate(), + "ux_constant_flow_face_centered_2D": _ux_constant_flow_face_centered_2D(), } diff --git a/tests/test_advection.py b/tests/test_advection.py index c5d6a9ebf..f8b5dc99d 100644 --- a/tests/test_advection.py +++ b/tests/test_advection.py @@ -98,7 +98,7 @@ def test_advection_zonal_periodic(): startlon = np.array([0.5, 0.4]) pset = ParticleSet(fieldset, pclass=PeriodicParticle, lon=startlon, lat=[0.5, 0.5]) pset.execute([AdvectionEE, periodicBC], runtime=np.timedelta64(40, "s"), dt=np.timedelta64(1, "s")) - np.testing.assert_allclose(pset.total_dlon, 4.1, atol=1e-5) + np.testing.assert_allclose(pset.total_dlon, 4.0, atol=1e-5) np.testing.assert_allclose(pset.lon, startlon, atol=1e-5) np.testing.assert_allclose(pset.lat, 0.5, atol=1e-5) diff --git a/tests/test_particleset.py b/tests/test_particleset.py index 46bd05da8..6abc20927 100644 --- a/tests/test_particleset.py +++ b/tests/test_particleset.py @@ -125,7 +125,7 @@ def Addlon(particles, fieldset): # pragma: no cover particles.dlon += particles.dt pset.execute(Addlon, dt=np.timedelta64(2, "s"), runtime=np.timedelta64(8, "s"), verbose_progress=False) - assert np.allclose([p.lon + p.dlon for p in pset], [10 - t for t in times]) + assert np.allclose([p.lon + p.dlon for p in pset], [8 - t for t in times]) def test_populate_indices(fieldset): diff --git a/tests/test_particleset_execute.py b/tests/test_particleset_execute.py index e836b0222..7b3859543 100644 --- a/tests/test_particleset_execute.py +++ b/tests/test_particleset_execute.py @@ -203,7 +203,7 @@ def SampleU(particles, fieldset): # pragma: no cover pset = ParticleSet(fieldset, lon=fieldset.U.grid.lon[-1], lat=fieldset.U.grid.lat[-1] + dlat) with pytest.raises(FieldOutOfBoundError): - pset.execute(SampleU, runtime=np.timedelta64(1, "D"), dt=np.timedelta64(1, "D")) + pset.execute(SampleU, 
runtime=np.timedelta64(2, "D"), dt=np.timedelta64(1, "D")) @pytest.mark.parametrize( @@ -216,7 +216,7 @@ def AddDt(particles, fieldset): # pragma: no cover pclass = Particle.add_variable(Variable("added_dt", dtype=np.float32, initial=0)) pset = ParticleSet(fieldset, pclass=pclass, lon=0, lat=0) pset.execute(AddDt, runtime=dt * 10, dt=dt) - np.testing.assert_allclose(pset[0].added_dt, 11.0 * timedelta_to_float(dt), atol=1e-5) + np.testing.assert_allclose(pset[0].added_dt, 10.0 * timedelta_to_float(dt), atol=1e-5) def test_pset_remove_particle_in_kernel(fieldset): @@ -286,7 +286,7 @@ def AddLon(particles, fieldset): # pragma: no cover pset = ParticleSet(fieldset, lon=np.zeros(len(start_times)), lat=np.zeros(len(start_times)), time=start_times) pset.execute(AddLon, dt=np.timedelta64(1, "s"), endtime=endtime) - np.testing.assert_array_equal(pset.lon, [9, 7, 0]) + np.testing.assert_array_equal(pset.lon, [8, 6, 0]) assert pset.time[0:1] == timedelta_to_float(endtime - fieldset.time_interval.left) assert pset.time[2] == timedelta_to_float( start_times[2] - fieldset.time_interval.left @@ -299,7 +299,7 @@ def AddLon(particles, fieldset): # pragma: no cover pset = ParticleSet(fieldset, lon=np.zeros(len(start_times)), lat=np.zeros(len(start_times)), time=start_times) pset.execute(AddLon, dt=-np.timedelta64(1, "s"), endtime=endtime) - np.testing.assert_array_equal(pset.lon, [9, 7, 0]) + np.testing.assert_array_equal(pset.lon, [8, 6, 0]) assert pset.time[0:1] == timedelta_to_float(endtime - fieldset.time_interval.left) assert pset.time[2] == timedelta_to_float( start_times[2] - fieldset.time_interval.left @@ -411,7 +411,7 @@ def KernelCounter(particles, fieldset): # pragma: no cover pset = ParticleSet(fieldset, lon=np.zeros(1), lat=np.zeros(1)) pset.execute(KernelCounter, dt=np.timedelta64(2, "s"), runtime=np.timedelta64(5, "s")) - assert pset.lon == 4 + assert pset.lon == 3 assert pset.dt == 2 assert pset.time == 5 diff --git a/tests/test_particlesetview.py 
b/tests/test_particlesetview.py index d5d60161a..2f0532543 100644 --- a/tests/test_particlesetview.py +++ b/tests/test_particlesetview.py @@ -77,11 +77,11 @@ def ConditionalHeating(particles, fieldset): # pragma: no cover pset.execute(ConditionalHeating, runtime=np.timedelta64(4, "s"), dt=np.timedelta64(1, "s")) - # After 5 timesteps (0, 1, 2, 3, 4): left particles should be at 15.0, right at 7.5 + # After 4 timesteps: left particles should be at 14.0, right at 8.0 left_particles = pset.lon < 0.5 right_particles = pset.lon >= 0.5 - assert np.allclose(pset.temp[left_particles], 15.0, atol=1e-6) - assert np.allclose(pset.temp[right_particles], 7.5, atol=1e-6) + assert np.allclose(pset.temp[left_particles], 14.0, atol=1e-6) + assert np.allclose(pset.temp[right_particles], 8.0, atol=1e-6) def test_particle_mask_progressive_changes(fieldset): @@ -134,9 +134,9 @@ def MultiMaskOperations(particles, fieldset): # pragma: no cover group2 = (pset.lon >= 0.33) & (pset.lon < 0.67) group3 = pset.lon >= 0.67 - assert np.allclose(pset.counter[group1], 6, atol=1e-6) # 6 timesteps * 1 - assert np.allclose(pset.counter[group2], 12, atol=1e-6) # 6 timesteps * 2 - assert np.allclose(pset.counter[group3], 18, atol=1e-6) # 6 timesteps * 3 + assert np.allclose(pset.counter[group1], 5, atol=1e-6) # 5 timesteps * 1 + assert np.allclose(pset.counter[group2], 10, atol=1e-6) # 5 timesteps * 2 + assert np.allclose(pset.counter[group3], 15, atol=1e-6) # 5 timesteps * 3 def test_particle_mask_empty_mask_handling(fieldset): @@ -178,5 +178,5 @@ def MoveLon(particles, fieldset): # pragma: no cover # Should have deleted edge particles assert pset.size < initial_size - # Remaining particles should be in the middle range - assert np.all((pset.lon >= 0.2) & (pset.lon <= 0.8)) + # Remaining particles should be in the middle range (within 0.02 of total displacement) + assert np.all((pset.lon >= 0.2) & (pset.lon <= 0.82)) diff --git a/tests/test_uxadvection.py b/tests/test_uxadvection.py new file mode 100644
index 000000000..e6494d888
--- /dev/null
+++ b/tests/test_uxadvection.py
@@ -0,0 +1,26 @@
+import numpy as np
+import pytest
+
+import parcels
+from parcels._datasets.unstructured.generic import datasets as datasets_unstructured
+from parcels.kernels import (
+    AdvectionEE,
+    AdvectionRK2,
+    AdvectionRK4,
+)
+
+
+@pytest.mark.parametrize("integrator", [AdvectionEE, AdvectionRK2, AdvectionRK4])
+def test_ux_constant_flow_face_centered_2D(integrator, tmp_path):
+    ds = datasets_unstructured["ux_constant_flow_face_centered_2D"]
+    T = np.timedelta64(3600, "s")
+    dt = np.timedelta64(300, "s")
+
+    fieldset = parcels.FieldSet.from_ugrid_conventions(ds, mesh="flat")
+    pset = parcels.ParticleSet(fieldset, lon=[5.0], lat=[5.0])
+    # Write output into a per-test temporary directory so parametrized runs cannot collide or pollute the CWD.
+    pfile = parcels.ParticleFile(store=str(tmp_path / "test.zarr"), outputdt=dt)
+    pset.execute(integrator, runtime=T, dt=dt, output_file=pfile, verbose_progress=False)
+    # Constant zonal flow U0 = 0.001 deg/s over 3600 s => 3.6 deg displacement from lon = 5.0.
+    expected_lon = 5.0 + 0.001 * 3600.0
+    np.testing.assert_allclose(pset.lon, expected_lon, atol=1e-5)