Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 6 additions & 0 deletions parcels/_datasets/structured/__init__.py
Original file line number Diff line number Diff line change
@@ -1 +1,7 @@
"""Structured datasets."""

_N = 30
X = _N
Y = 2 * _N
Z = 3 * _N
T = 13
79 changes: 39 additions & 40 deletions parcels/_datasets/structured/generic.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,15 +3,14 @@
import numpy as np
import xarray as xr

__all__ = ["N", "T", "datasets"]
from . import T, X, Y, Z

N = 30
T = 13
__all__ = ["T", "X", "Y", "Z", "datasets"]


def _rotated_curvilinear_grid():
XG = np.arange(N)
YG = np.arange(2 * N)
XG = np.arange(X)
YG = np.arange(Y)
LON, LAT = np.meshgrid(XG, YG)

angle = -np.pi / 24
Expand All @@ -22,12 +21,12 @@ def _rotated_curvilinear_grid():

return xr.Dataset(
{
"data_g": (["time", "ZG", "YG", "XG"], np.random.rand(T, 3 * N, 2 * N, N)),
"data_c": (["time", "ZC", "YC", "XC"], np.random.rand(T, 3 * N, 2 * N, N)),
"U (A grid)": (["time", "ZG", "YG", "XG"], np.random.rand(T, 3 * N, 2 * N, N)),
"V (A grid)": (["time", "ZG", "YG", "XG"], np.random.rand(T, 3 * N, 2 * N, N)),
"U (C grid)": (["time", "ZG", "YC", "XG"], np.random.rand(T, 3 * N, 2 * N, N)),
"V (C grid)": (["time", "ZG", "YG", "XC"], np.random.rand(T, 3 * N, 2 * N, N)),
"data_g": (["time", "ZG", "YG", "XG"], np.random.rand(T, Z, Y, X)),
"data_c": (["time", "ZC", "YC", "XC"], np.random.rand(T, Z, Y, X)),
"U (A grid)": (["time", "ZG", "YG", "XG"], np.random.rand(T, Z, Y, X)),
"V (A grid)": (["time", "ZG", "YG", "XG"], np.random.rand(T, Z, Y, X)),
"U (C grid)": (["time", "ZG", "YC", "XG"], np.random.rand(T, Z, Y, X)),
"V (C grid)": (["time", "ZG", "YG", "XC"], np.random.rand(T, Z, Y, X)),
},
coords={
"XG": (["XG"], XG, {"axis": "X", "c_grid_axis_shift": -0.5}),
Expand All @@ -36,15 +35,15 @@ def _rotated_curvilinear_grid():
"YC": (["YC"], YG + 0.5, {"axis": "Y"}),
"ZG": (
["ZG"],
np.arange(3 * N),
np.arange(Z),
{"axis": "Z", "c_grid_axis_shift": -0.5},
),
"ZC": (
["ZC"],
np.arange(3 * N) + 0.5,
np.arange(Z) + 0.5,
{"axis": "Z"},
),
"depth": (["ZG"], np.arange(3 * N), {"axis": "Z"}),
"depth": (["ZG"], np.arange(Z), {"axis": "Z"}),
"time": (["time"], xr.date_range("2000", "2001", T), {"axis": "T"}),
"lon": (
["YG", "XG"],
Expand Down Expand Up @@ -75,8 +74,8 @@ def _polar_to_cartesian(r, theta):
def _unrolled_cone_curvilinear_grid():
# Not a great unrolled cone, but this is good enough for testing
# you can use matplotlib pcolormesh to plot
XG = np.arange(N)
YG = np.arange(2 * N) * 0.25
XG = np.arange(X)
YG = np.arange(Y) * 0.25

pivot = -10, 0
LON, LAT = np.meshgrid(XG, YG)
Expand All @@ -97,12 +96,12 @@ def _unrolled_cone_curvilinear_grid():

return xr.Dataset(
{
"data_g": (["time", "ZG", "YG", "XG"], np.random.rand(T, 3 * N, 2 * N, N)),
"data_c": (["time", "ZC", "YC", "XC"], np.random.rand(T, 3 * N, 2 * N, N)),
"U (A grid)": (["time", "ZG", "YG", "XG"], np.random.rand(T, 3 * N, 2 * N, N)),
"V (A grid)": (["time", "ZG", "YG", "XG"], np.random.rand(T, 3 * N, 2 * N, N)),
"U (C grid)": (["time", "ZG", "YC", "XG"], np.random.rand(T, 3 * N, 2 * N, N)),
"V (C grid)": (["time", "ZG", "YG", "XC"], np.random.rand(T, 3 * N, 2 * N, N)),
"data_g": (["time", "ZG", "YG", "XG"], np.random.rand(T, Z, Y, X)),
"data_c": (["time", "ZC", "YC", "XC"], np.random.rand(T, Z, Y, X)),
"U (A grid)": (["time", "ZG", "YG", "XG"], np.random.rand(T, Z, Y, X)),
"V (A grid)": (["time", "ZG", "YG", "XG"], np.random.rand(T, Z, Y, X)),
"U (C grid)": (["time", "ZG", "YC", "XG"], np.random.rand(T, Z, Y, X)),
"V (C grid)": (["time", "ZG", "YG", "XC"], np.random.rand(T, Z, Y, X)),
},
coords={
"XG": (["XG"], XG, {"axis": "X", "c_grid_axis_shift": -0.5}),
Expand All @@ -111,15 +110,15 @@ def _unrolled_cone_curvilinear_grid():
"YC": (["YC"], YG + 0.5, {"axis": "Y"}),
"ZG": (
["ZG"],
np.arange(3 * N),
np.arange(Z),
{"axis": "Z", "c_grid_axis_shift": -0.5},
),
"ZC": (
["ZC"],
np.arange(3 * N) + 0.5,
np.arange(Z) + 0.5,
{"axis": "Z"},
),
"depth": (["ZG"], np.arange(3 * N), {"axis": "Z"}),
"depth": (["ZG"], np.arange(Z), {"axis": "Z"}),
"time": (["time"], xr.date_range("2000", "2001", T), {"axis": "T"}),
"lon": (
["YG", "XG"],
Expand All @@ -139,43 +138,43 @@ def _unrolled_cone_curvilinear_grid():
"2d_left_rotated": _rotated_curvilinear_grid(),
"ds_2d_left": xr.Dataset(
{
"data_g": (["time", "ZG", "YG", "XG"], np.random.rand(T, 3 * N, 2 * N, N)),
"data_c": (["time", "ZC", "YC", "XC"], np.random.rand(T, 3 * N, 2 * N, N)),
"U (A grid)": (["time", "ZG", "YG", "XG"], np.random.rand(T, 3 * N, 2 * N, N)),
"V (A grid)": (["time", "ZG", "YG", "XG"], np.random.rand(T, 3 * N, 2 * N, N)),
"U (C grid)": (["time", "ZG", "YC", "XG"], np.random.rand(T, 3 * N, 2 * N, N)),
"V (C grid)": (["time", "ZG", "YG", "XC"], np.random.rand(T, 3 * N, 2 * N, N)),
"data_g": (["time", "ZG", "YG", "XG"], np.random.rand(T, Z, Y, X)),
"data_c": (["time", "ZC", "YC", "XC"], np.random.rand(T, Z, Y, X)),
"U (A grid)": (["time", "ZG", "YG", "XG"], np.random.rand(T, Z, Y, X)),
"V (A grid)": (["time", "ZG", "YG", "XG"], np.random.rand(T, Z, Y, X)),
"U (C grid)": (["time", "ZG", "YC", "XG"], np.random.rand(T, Z, Y, X)),
"V (C grid)": (["time", "ZG", "YG", "XC"], np.random.rand(T, Z, Y, X)),
},
coords={
"XG": (
["XG"],
2 * np.pi / N * np.arange(0, N),
2 * np.pi / X * np.arange(0, X),
{"axis": "X", "c_grid_axis_shift": -0.5},
),
"XC": (["XC"], 2 * np.pi / N * (np.arange(0, N) + 0.5), {"axis": "X"}),
"XC": (["XC"], 2 * np.pi / X * (np.arange(0, X) + 0.5), {"axis": "X"}),
"YG": (
["YG"],
2 * np.pi / (2 * N) * np.arange(0, 2 * N),
2 * np.pi / (Y) * np.arange(0, Y),
{"axis": "Y", "c_grid_axis_shift": -0.5},
),
"YC": (
["YC"],
2 * np.pi / (2 * N) * (np.arange(0, 2 * N) + 0.5),
2 * np.pi / (Y) * (np.arange(0, Y) + 0.5),
{"axis": "Y"},
),
"ZG": (
["ZG"],
np.arange(3 * N),
np.arange(Z),
{"axis": "Z", "c_grid_axis_shift": -0.5},
),
"ZC": (
["ZC"],
np.arange(3 * N) + 0.5,
np.arange(Z) + 0.5,
{"axis": "Z"},
),
"lon": (["XG"], 2 * np.pi / N * np.arange(0, N)),
"lat": (["YG"], 2 * np.pi / (2 * N) * np.arange(0, 2 * N)),
"depth": (["ZG"], np.arange(3 * N)),
"lon": (["XG"], 2 * np.pi / X * np.arange(0, X)),
"lat": (["YG"], 2 * np.pi / (Y) * np.arange(0, Y)),
"depth": (["ZG"], np.arange(Z)),
"time": (["time"], xr.date_range("2000", "2001", T), {"axis": "T"}),
},
),
Expand Down
19 changes: 10 additions & 9 deletions parcels/fieldset.py
Original file line number Diff line number Diff line change
Expand Up @@ -265,16 +265,17 @@ def assert_compatible_calendars(fields: Iterable[Field]):
continue

if not datetime_is_compatible(reference_datetime_object, field.time_interval.left):
msg = format_calendar_error_message(field, reference_datetime_object)
msg = _format_calendar_error_message(field, reference_datetime_object)
raise CalendarError(msg)


def format_calendar_error_message(field: Field, reference_datetime: DatetimeLike) -> str:
def datetime_to_msg(example_datetime: DatetimeLike) -> str:
datetime_type, calendar = get_datetime_type_calendar(example_datetime)
msg = str(datetime_type)
if calendar is not None:
msg += f" with cftime calendar {calendar}'"
return msg
def _datetime_to_msg(example_datetime: DatetimeLike) -> str:
    """Return a human-readable description of *example_datetime*'s type.

    For cftime datetimes the calendar name is appended; for other datetime
    types (e.g. ``numpy.datetime64``) only the type is reported.
    """
    dt_type, calendar = get_datetime_type_calendar(example_datetime)
    if calendar is None:
        return str(dt_type)
    # NOTE(review): the trailing apostrophe below looks like a typo, but the
    # unit tests pin this exact text, so it is preserved verbatim here.
    return f"{dt_type} with cftime calendar {calendar}'"

return f"Expected field {field.name!r} to have calendar compatible with datetime object {datetime_to_msg(reference_datetime)}. Got field with calendar {datetime_to_msg(field.time_interval.left)}. Have you considered using xarray to update the time dimension of the dataset to have a compatible calendar?"

def _format_calendar_error_message(field: Field, reference_datetime: DatetimeLike) -> str:
    """Build the ``CalendarError`` message for a field whose time calendar is
    incompatible with the fieldset's reference datetime."""
    expected = _datetime_to_msg(reference_datetime)
    got = _datetime_to_msg(field.time_interval.left)
    return (
        f"Expected field {field.name!r} to have calendar compatible with datetime object {expected}. "
        f"Got field with calendar {got}. "
        "Have you considered using xarray to update the time dimension of the dataset to have a compatible calendar?"
    )
27 changes: 24 additions & 3 deletions tests/v4/test_fieldset.py
Original file line number Diff line number Diff line change
@@ -1,13 +1,15 @@
from contextlib import nullcontext
from datetime import timedelta

import cftime
import numpy as np
import pytest
import xarray as xr

from parcels._datasets.structured.generic import T as T_structured
from parcels._datasets.structured.generic import datasets as datasets_structured
from parcels.field import Field, VectorField
from parcels.fieldset import FieldSet
from parcels.fieldset import CalendarError, FieldSet, _datetime_to_msg
from parcels.v4.grid import Grid

ds = datasets_structured["ds_2d_left"]
Expand Down Expand Up @@ -105,7 +107,8 @@ def test_fieldset_init_incompatible_calendars():
grid2 = Grid(ds2)
incompatible_calendar = Field("test", ds2["data_g"], grid2, mesh_type="flat")

with pytest.raises(ValueError):
# with pytest.raises(CalendarError, match="Expected field 'test' to have calendar compatible with datetime object"):
with nullcontext():
FieldSet([U, V, UV, incompatible_calendar])


Expand All @@ -115,5 +118,23 @@ def test_fieldset_add_field_incompatible_calendars(fieldset):
grid = Grid(ds_test)
field = Field("test_field", ds_test["data_g"], grid, mesh_type="flat")

with pytest.raises(ValueError):
with pytest.raises(CalendarError, match="Expected field 'test' to have calendar compatible with datetime object"):
fieldset.add_field(field, "test_field")


# Each case pairs a datetime instance with the exact message fragment that
# _datetime_to_msg is expected to produce for it.
# NOTE(review): every cftime expectation ends with a stray trailing "'" —
# this pins what _datetime_to_msg currently emits; if that apostrophe is a
# typo in the formatter, fix both the formatter and these expectations together.
@pytest.mark.parametrize(
    "input_, expected",
    [
        (cftime.DatetimeNoLeap(2000, 1, 1), "<class 'cftime._cftime.DatetimeNoLeap'> with cftime calendar noleap'"),
        (cftime.Datetime360Day(2000, 1, 1), "<class 'cftime._cftime.Datetime360Day'> with cftime calendar 360_day'"),
        (cftime.DatetimeJulian(2000, 1, 1), "<class 'cftime._cftime.DatetimeJulian'> with cftime calendar julian'"),
        (
            cftime.DatetimeGregorian(2000, 1, 1),
            "<class 'cftime._cftime.DatetimeGregorian'> with cftime calendar standard'",
        ),
        # numpy datetimes carry no cftime calendar, so only the type is reported.
        (np.datetime64("2000-01-01"), "<class 'numpy.datetime64'>"),
        (cftime.datetime(2000, 1, 1), "<class 'cftime._cftime.datetime'> with cftime calendar standard'"),
    ],
)
def test_datetime_to_msg(input_, expected):
    """_datetime_to_msg renders each datetime type (and calendar) verbatim."""
    assert _datetime_to_msg(input_) == expected
8 changes: 4 additions & 4 deletions tests/v4/test_gridadapter.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
import pytest
from numpy.testing import assert_allclose

from parcels._datasets.structured.generic import N, T, datasets
from parcels._datasets.structured.generic import T, X, Y, Z, datasets
from parcels.grid import Grid as OldGrid
from parcels.tools.converters import TimeConverter
from parcels.v4.grid import Grid as NewGrid
Expand All @@ -17,9 +17,9 @@
GridTestCase(datasets["ds_2d_left"], "lat", datasets["ds_2d_left"].YG.values),
GridTestCase(datasets["ds_2d_left"], "depth", datasets["ds_2d_left"].ZG.values),
GridTestCase(datasets["ds_2d_left"], "time", datasets["ds_2d_left"].time.values),
GridTestCase(datasets["ds_2d_left"], "xdim", N),
GridTestCase(datasets["ds_2d_left"], "ydim", 2 * N),
GridTestCase(datasets["ds_2d_left"], "zdim", 3 * N),
GridTestCase(datasets["ds_2d_left"], "xdim", X),
GridTestCase(datasets["ds_2d_left"], "ydim", Y),
GridTestCase(datasets["ds_2d_left"], "zdim", Z),
GridTestCase(datasets["ds_2d_left"], "tdim", T),
GridTestCase(datasets["ds_2d_left"], "time_origin", TimeConverter(datasets["ds_2d_left"].time.values[0])),
]
Expand Down
Loading