diff --git a/parcels/_datasets/structured/__init__.py b/parcels/_datasets/structured/__init__.py index 6d040b617a..ceb94d0af6 100644 --- a/parcels/_datasets/structured/__init__.py +++ b/parcels/_datasets/structured/__init__.py @@ -1 +1,7 @@ """Structured datasets.""" + +_N = 30 +X = _N +Y = 2 * _N +Z = 3 * _N +T = 13 diff --git a/parcels/_datasets/structured/generic.py b/parcels/_datasets/structured/generic.py index fc0928ce45..25d4549558 100644 --- a/parcels/_datasets/structured/generic.py +++ b/parcels/_datasets/structured/generic.py @@ -3,15 +3,14 @@ import numpy as np import xarray as xr -__all__ = ["N", "T", "datasets"] +from . import T, X, Y, Z -N = 30 -T = 13 +__all__ = ["T", "X", "Y", "Z", "datasets"] def _rotated_curvilinear_grid(): - XG = np.arange(N) - YG = np.arange(2 * N) + XG = np.arange(X) + YG = np.arange(Y) LON, LAT = np.meshgrid(XG, YG) angle = -np.pi / 24 @@ -22,12 +21,12 @@ def _rotated_curvilinear_grid(): return xr.Dataset( { - "data_g": (["time", "ZG", "YG", "XG"], np.random.rand(T, 3 * N, 2 * N, N)), - "data_c": (["time", "ZC", "YC", "XC"], np.random.rand(T, 3 * N, 2 * N, N)), - "U (A grid)": (["time", "ZG", "YG", "XG"], np.random.rand(T, 3 * N, 2 * N, N)), - "V (A grid)": (["time", "ZG", "YG", "XG"], np.random.rand(T, 3 * N, 2 * N, N)), - "U (C grid)": (["time", "ZG", "YC", "XG"], np.random.rand(T, 3 * N, 2 * N, N)), - "V (C grid)": (["time", "ZG", "YG", "XC"], np.random.rand(T, 3 * N, 2 * N, N)), + "data_g": (["time", "ZG", "YG", "XG"], np.random.rand(T, Z, Y, X)), + "data_c": (["time", "ZC", "YC", "XC"], np.random.rand(T, Z, Y, X)), + "U (A grid)": (["time", "ZG", "YG", "XG"], np.random.rand(T, Z, Y, X)), + "V (A grid)": (["time", "ZG", "YG", "XG"], np.random.rand(T, Z, Y, X)), + "U (C grid)": (["time", "ZG", "YC", "XG"], np.random.rand(T, Z, Y, X)), + "V (C grid)": (["time", "ZG", "YG", "XC"], np.random.rand(T, Z, Y, X)), }, coords={ "XG": (["XG"], XG, {"axis": "X", "c_grid_axis_shift": -0.5}), @@ -36,15 +35,15 @@ def 
_rotated_curvilinear_grid(): "YC": (["YC"], YG + 0.5, {"axis": "Y"}), "ZG": ( ["ZG"], - np.arange(3 * N), + np.arange(Z), {"axis": "Z", "c_grid_axis_shift": -0.5}, ), "ZC": ( ["ZC"], - np.arange(3 * N) + 0.5, + np.arange(Z) + 0.5, {"axis": "Z"}, ), - "depth": (["ZG"], np.arange(3 * N), {"axis": "Z"}), + "depth": (["ZG"], np.arange(Z), {"axis": "Z"}), "time": (["time"], xr.date_range("2000", "2001", T), {"axis": "T"}), "lon": ( ["YG", "XG"], @@ -75,8 +74,8 @@ def _polar_to_cartesian(r, theta): def _unrolled_cone_curvilinear_grid(): # Not a great unrolled cone, but this is good enough for testing # you can use matplotlib pcolormesh to plot - XG = np.arange(N) - YG = np.arange(2 * N) * 0.25 + XG = np.arange(X) + YG = np.arange(Y) * 0.25 pivot = -10, 0 LON, LAT = np.meshgrid(XG, YG) @@ -97,12 +96,12 @@ def _unrolled_cone_curvilinear_grid(): return xr.Dataset( { - "data_g": (["time", "ZG", "YG", "XG"], np.random.rand(T, 3 * N, 2 * N, N)), - "data_c": (["time", "ZC", "YC", "XC"], np.random.rand(T, 3 * N, 2 * N, N)), - "U (A grid)": (["time", "ZG", "YG", "XG"], np.random.rand(T, 3 * N, 2 * N, N)), - "V (A grid)": (["time", "ZG", "YG", "XG"], np.random.rand(T, 3 * N, 2 * N, N)), - "U (C grid)": (["time", "ZG", "YC", "XG"], np.random.rand(T, 3 * N, 2 * N, N)), - "V (C grid)": (["time", "ZG", "YG", "XC"], np.random.rand(T, 3 * N, 2 * N, N)), + "data_g": (["time", "ZG", "YG", "XG"], np.random.rand(T, Z, Y, X)), + "data_c": (["time", "ZC", "YC", "XC"], np.random.rand(T, Z, Y, X)), + "U (A grid)": (["time", "ZG", "YG", "XG"], np.random.rand(T, Z, Y, X)), + "V (A grid)": (["time", "ZG", "YG", "XG"], np.random.rand(T, Z, Y, X)), + "U (C grid)": (["time", "ZG", "YC", "XG"], np.random.rand(T, Z, Y, X)), + "V (C grid)": (["time", "ZG", "YG", "XC"], np.random.rand(T, Z, Y, X)), }, coords={ "XG": (["XG"], XG, {"axis": "X", "c_grid_axis_shift": -0.5}), @@ -111,15 +110,15 @@ def _unrolled_cone_curvilinear_grid(): "YC": (["YC"], YG + 0.5, {"axis": "Y"}), "ZG": ( ["ZG"], - np.arange(3 * 
N), + np.arange(Z), {"axis": "Z", "c_grid_axis_shift": -0.5}, ), "ZC": ( ["ZC"], - np.arange(3 * N) + 0.5, + np.arange(Z) + 0.5, {"axis": "Z"}, ), - "depth": (["ZG"], np.arange(3 * N), {"axis": "Z"}), + "depth": (["ZG"], np.arange(Z), {"axis": "Z"}), "time": (["time"], xr.date_range("2000", "2001", T), {"axis": "T"}), "lon": ( ["YG", "XG"], @@ -139,43 +138,43 @@ def _unrolled_cone_curvilinear_grid(): "2d_left_rotated": _rotated_curvilinear_grid(), "ds_2d_left": xr.Dataset( { - "data_g": (["time", "ZG", "YG", "XG"], np.random.rand(T, 3 * N, 2 * N, N)), - "data_c": (["time", "ZC", "YC", "XC"], np.random.rand(T, 3 * N, 2 * N, N)), - "U (A grid)": (["time", "ZG", "YG", "XG"], np.random.rand(T, 3 * N, 2 * N, N)), - "V (A grid)": (["time", "ZG", "YG", "XG"], np.random.rand(T, 3 * N, 2 * N, N)), - "U (C grid)": (["time", "ZG", "YC", "XG"], np.random.rand(T, 3 * N, 2 * N, N)), - "V (C grid)": (["time", "ZG", "YG", "XC"], np.random.rand(T, 3 * N, 2 * N, N)), + "data_g": (["time", "ZG", "YG", "XG"], np.random.rand(T, Z, Y, X)), + "data_c": (["time", "ZC", "YC", "XC"], np.random.rand(T, Z, Y, X)), + "U (A grid)": (["time", "ZG", "YG", "XG"], np.random.rand(T, Z, Y, X)), + "V (A grid)": (["time", "ZG", "YG", "XG"], np.random.rand(T, Z, Y, X)), + "U (C grid)": (["time", "ZG", "YC", "XG"], np.random.rand(T, Z, Y, X)), + "V (C grid)": (["time", "ZG", "YG", "XC"], np.random.rand(T, Z, Y, X)), }, coords={ "XG": ( ["XG"], - 2 * np.pi / N * np.arange(0, N), + 2 * np.pi / X * np.arange(0, X), {"axis": "X", "c_grid_axis_shift": -0.5}, ), - "XC": (["XC"], 2 * np.pi / N * (np.arange(0, N) + 0.5), {"axis": "X"}), + "XC": (["XC"], 2 * np.pi / X * (np.arange(0, X) + 0.5), {"axis": "X"}), "YG": ( ["YG"], - 2 * np.pi / (2 * N) * np.arange(0, 2 * N), + 2 * np.pi / (Y) * np.arange(0, Y), {"axis": "Y", "c_grid_axis_shift": -0.5}, ), "YC": ( ["YC"], - 2 * np.pi / (2 * N) * (np.arange(0, 2 * N) + 0.5), + 2 * np.pi / (Y) * (np.arange(0, Y) + 0.5), {"axis": "Y"}, ), "ZG": ( ["ZG"], - np.arange(3 * 
N), + np.arange(Z), {"axis": "Z", "c_grid_axis_shift": -0.5}, ), "ZC": ( ["ZC"], - np.arange(3 * N) + 0.5, + np.arange(Z) + 0.5, {"axis": "Z"}, ), - "lon": (["XG"], 2 * np.pi / N * np.arange(0, N)), - "lat": (["YG"], 2 * np.pi / (2 * N) * np.arange(0, 2 * N)), - "depth": (["ZG"], np.arange(3 * N)), + "lon": (["XG"], 2 * np.pi / X * np.arange(0, X)), + "lat": (["YG"], 2 * np.pi / (Y) * np.arange(0, Y)), + "depth": (["ZG"], np.arange(Z)), "time": (["time"], xr.date_range("2000", "2001", T), {"axis": "T"}), }, ), diff --git a/parcels/fieldset.py b/parcels/fieldset.py index 9f9393d81d..195006ebed 100644 --- a/parcels/fieldset.py +++ b/parcels/fieldset.py @@ -265,16 +265,17 @@ def assert_compatible_calendars(fields: Iterable[Field]): continue if not datetime_is_compatible(reference_datetime_object, field.time_interval.left): - msg = format_calendar_error_message(field, reference_datetime_object) + msg = _format_calendar_error_message(field, reference_datetime_object) raise CalendarError(msg) -def format_calendar_error_message(field: Field, reference_datetime: DatetimeLike) -> str: - def datetime_to_msg(example_datetime: DatetimeLike) -> str: - datetime_type, calendar = get_datetime_type_calendar(example_datetime) - msg = str(datetime_type) - if calendar is not None: - msg += f" with cftime calendar {calendar}'" - return msg +def _datetime_to_msg(example_datetime: DatetimeLike) -> str: + datetime_type, calendar = get_datetime_type_calendar(example_datetime) + msg = str(datetime_type) + if calendar is not None: + msg += f" with cftime calendar {calendar}'" + return msg - return f"Expected field {field.name!r} to have calendar compatible with datetime object {datetime_to_msg(reference_datetime)}. Got field with calendar {datetime_to_msg(field.time_interval.left)}. Have you considered using xarray to update the time dimension of the dataset to have a compatible calendar?" 
+ +def _format_calendar_error_message(field: Field, reference_datetime: DatetimeLike) -> str: + return f"Expected field {field.name!r} to have calendar compatible with datetime object {_datetime_to_msg(reference_datetime)}. Got field with calendar {_datetime_to_msg(field.time_interval.left)}. Have you considered using xarray to update the time dimension of the dataset to have a compatible calendar?" diff --git a/tests/v4/test_fieldset.py b/tests/v4/test_fieldset.py index 8ea5885dd2..aef4e34030 100644 --- a/tests/v4/test_fieldset.py +++ b/tests/v4/test_fieldset.py @@ -1,5 +1,6 @@ from datetime import timedelta +import cftime import numpy as np import pytest import xarray as xr @@ -7,7 +8,7 @@ from parcels._datasets.structured.generic import T as T_structured from parcels._datasets.structured.generic import datasets as datasets_structured from parcels.field import Field, VectorField -from parcels.fieldset import FieldSet +from parcels.fieldset import CalendarError, FieldSet, _datetime_to_msg from parcels.v4.grid import Grid ds = datasets_structured["ds_2d_left"] @@ -105,7 +106,7 @@ def test_fieldset_init_incompatible_calendars(): grid2 = Grid(ds2) incompatible_calendar = Field("test", ds2["data_g"], grid2, mesh_type="flat") - with pytest.raises(ValueError): + with pytest.raises(CalendarError, match="Expected field 'test' to have calendar compatible with datetime object"): FieldSet([U, V, UV, incompatible_calendar]) @@ -115,5 +116,23 @@ def test_fieldset_add_field_incompatible_calendars(fieldset): grid = Grid(ds_test) field = Field("test_field", ds_test["data_g"], grid, mesh_type="flat") - with pytest.raises(ValueError): + with pytest.raises(CalendarError, match="Expected field 'test_field' to have calendar compatible with datetime object"): fieldset.add_field(field, "test_field") + + +@pytest.mark.parametrize( + "input_, expected", + [ + (cftime.DatetimeNoLeap(2000, 1, 1), " with cftime calendar noleap'"), + 
(cftime.Datetime360Day(2000, 1, 1), " with cftime calendar 360_day'"), + (cftime.DatetimeJulian(2000, 1, 1), " with cftime calendar julian'"), + ( + cftime.DatetimeGregorian(2000, 1, 1), + " with cftime calendar standard'", + ), + (np.datetime64("2000-01-01"), ""), + (cftime.datetime(2000, 1, 1), " with cftime calendar standard'"), + ], +) +def test_datetime_to_msg(input_, expected): + assert _datetime_to_msg(input_) == expected diff --git a/tests/v4/test_gridadapter.py b/tests/v4/test_gridadapter.py index 8111764b3b..d692e705f9 100644 --- a/tests/v4/test_gridadapter.py +++ b/tests/v4/test_gridadapter.py @@ -4,7 +4,7 @@ import pytest from numpy.testing import assert_allclose -from parcels._datasets.structured.generic import N, T, datasets +from parcels._datasets.structured.generic import T, X, Y, Z, datasets from parcels.grid import Grid as OldGrid from parcels.tools.converters import TimeConverter from parcels.v4.grid import Grid as NewGrid @@ -17,9 +17,9 @@ GridTestCase(datasets["ds_2d_left"], "lat", datasets["ds_2d_left"].YG.values), GridTestCase(datasets["ds_2d_left"], "depth", datasets["ds_2d_left"].ZG.values), GridTestCase(datasets["ds_2d_left"], "time", datasets["ds_2d_left"].time.values), - GridTestCase(datasets["ds_2d_left"], "xdim", N), - GridTestCase(datasets["ds_2d_left"], "ydim", 2 * N), - GridTestCase(datasets["ds_2d_left"], "zdim", 3 * N), + GridTestCase(datasets["ds_2d_left"], "xdim", X), + GridTestCase(datasets["ds_2d_left"], "ydim", Y), + GridTestCase(datasets["ds_2d_left"], "zdim", Z), GridTestCase(datasets["ds_2d_left"], "tdim", T), GridTestCase(datasets["ds_2d_left"], "time_origin", TimeConverter(datasets["ds_2d_left"].time.values[0])), ]