From 8d025da073af22ebb846d2a18e83bc219a670988 Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Fri, 15 Aug 2025 07:58:36 +0200 Subject: [PATCH 01/29] Start of implementing C-Grid interpolation --- parcels/application_kernels/interpolation.py | 90 +++++++++++++++++++- parcels/field.py | 10 +-- tests/v4/test_advection.py | 14 +-- 3 files changed, 102 insertions(+), 12 deletions(-) diff --git a/parcels/application_kernels/interpolation.py b/parcels/application_kernels/interpolation.py index 1df817a2a..ee70cea98 100644 --- a/parcels/application_kernels/interpolation.py +++ b/parcels/application_kernels/interpolation.py @@ -9,11 +9,13 @@ import xarray as xr if TYPE_CHECKING: - from parcels.field import Field + from parcels.field import Field, VectorField from parcels.uxgrid import _UXGRID_AXES from parcels.xgrid import _XGRID_AXES __all__ = [ + "CGrid_Tracer", + "CGrid_Velocity", "UXPiecewiseConstantFace", "UXPiecewiseLinearNode", "XLinear", @@ -111,6 +113,92 @@ def XLinear( return value.compute() if isinstance(value, dask.Array) else value +def CGrid_Velocity( + vectorfield: VectorField, + ti: int, + position: dict[_XGRID_AXES, tuple[int, float | np.ndarray]], + tau: np.float32 | np.float64, + t: np.float32 | np.float64, + z: np.float32 | np.float64, + y: np.float32 | np.float64, + x: np.float32 | np.float64, +): + """ + Interpolation kernel for velocity fields on a C-Grid. + Following Delandmeter and Van Sebille (2019), velocity fields should be interpolated + only in the direction of the grid cell faces. + """ + # xi, xsi = position["X"] + # yi, eta = position["Y"] + # zi, zeta = position["Z"] + + # U = vectorfield.U.data + # V = vectorfield.V.data + + # axis_dim = vectorfield.U.grid.get_axis_dim_mapping(U.dims) + + # lenT = 2 if np.any(tau > 0) else 1 + # lenZ = 2 if np.any(zeta > 0) else 1 + + value = (0, 0, 0) + return value + + +def CGrid_Tracer( + field: Field, + ti: int, + position: dict[_XGRID_AXES, tuple[int, float | np.ndarray]], + tau: np.float32 | np.float64, + t: np.float32 | np.float64, + z: np.float32 | np.float64, + y: np.float32 | np.float64, + x: np.float32 | np.float64, +): + """Interpolation kernel for tracer fields on a C-Grid. 
+
+    Following Delandmeter and Van Sebille (2019), tracer fields should be interpolated
+    as piecewise constant over the grid cell.
+    """
+    xi, _ = position["X"]
+    yi, _ = position["Y"]
+    zi, _ = position["Z"]
+
+    axis_dim = field.grid.get_axis_dim_mapping(field.data.dims)
+    data = field.data
+
+    lenT = 2 if np.any(tau > 0) else 1
+
+    if lenT == 2:
+        # sample the same cell at both time levels ti and ti+1
+        ti_1 = np.clip(ti + 1, 0, data.shape[0] - 1)
+        ti = np.concatenate([ti, ti_1])
+        zi = np.tile(zi, 2)
+        yi = np.tile(yi, 2)
+        xi = np.tile(xi, 2)
+
+    # Create DataArrays for indexing
+    selection_dict = {
+        axis_dim["X"]: xr.DataArray(xi, dims=("points")),
+        axis_dim["Y"]: xr.DataArray(yi, dims=("points")),
+    }
+    if "Z" in axis_dim:
+        selection_dict[axis_dim["Z"]] = xr.DataArray(zi, dims=("points"))
+    if "time" in field.data.dims:
+        selection_dict["time"] = xr.DataArray(ti, dims=("points"))
+
+    value = data.isel(selection_dict).data.reshape(lenT, -1)
+
+    if lenT == 2:
+        # tau is per-particle (shape (npoints,)), matching value[0, :] and value[1, :]
+        value = value[0, :] * (1 - tau) + value[1, :] * tau
+    else:
+        value = value[0, :]
+
+    return value.compute() if isinstance(value, dask.Array) else value
+
+
 def UXPiecewiseConstantFace(
     field: Field,
     ti: int,
diff --git a/parcels/field.py b/parcels/field.py
index 8c2a55c7a..b59b6f61b 100644
--- a/parcels/field.py
+++ b/parcels/field.py
@@ -16,7 +16,7 @@
     VectorType,
     assert_valid_mesh,
 )
-from parcels.application_kernels.interpolation import UXPiecewiseLinearNode, XLinear, ZeroInterpolator
+from parcels.application_kernels.interpolation import CGrid_Velocity, UXPiecewiseLinearNode, XLinear, ZeroInterpolator
 from parcels.particle import KernelParticle
 from parcels.particleset import ParticleSet
 from parcels.tools.converters import (
@@ -292,8 +292,8 @@ def __init__(
         if vector_interp_method is None:
             self._vector_interp_method = None
         else:
-            _assert_same_function_signature(vector_interp_method, ref=ZeroInterpolator)
-            self._interp_method = vector_interp_method
+            _assert_same_function_signature(vector_interp_method, ref=CGrid_Velocity)
+            self._vector_interp_method = vector_interp_method

     def __repr__(self):
         return f"""<{type(self).__name__}>
@@ -308,7 +308,7 @@ def vector_interp_method(self):

     @vector_interp_method.setter
     def vector_interp_method(self, method: Callable):
-        _assert_same_function_signature(method, ref=ZeroInterpolator)
+        _assert_same_function_signature(method, ref=CGrid_Velocity)
         self._vector_interp_method = method

     def eval(self, time: datetime, z, y, x, particle=None, applyConversion=True):
@@ -333,7 +333,7 @@ def eval(self, time: datetime, z, y, x, particle=None, applyConversion=True):
             if "3D" in self.vector_type:
                 w = self.W._interp_method(self.W, ti, position, tau, time, z, y, x)
         else:
-            (u, v, w) = self._vector_interp_method(self, ti, position, time, z, y, x)
+            (u, v, w) = self._vector_interp_method(self, ti, position, tau, time, z, y, x)

         if applyConversion:
             u = self.U.units.to_target(u, z, y, x)
diff --git a/tests/v4/test_advection.py b/tests/v4/test_advection.py
index 7b209ab30..eb182e06b 100644
--- a/tests/v4/test_advection.py
+++ b/tests/v4/test_advection.py
@@ -13,7 +13,7 @@
 )
 from parcels.application_kernels.advection import AdvectionEE, AdvectionRK4, AdvectionRK4_3D, AdvectionRK45
 from parcels.application_kernels.advectiondiffusion import AdvectionDiffusionEM, AdvectionDiffusionM1
-from parcels.application_kernels.interpolation import XLinear +from parcels.application_kernels.interpolation import CGrid_Tracer, CGrid_Velocity, XLinear from parcels.field import Field, VectorField from parcels.fieldset import FieldSet from parcels.particle import Particle, Variable @@ -335,15 +335,17 @@ def truth_moving(x_0, y_0, t): ("RK45", 0.1), ], ) -@pytest.mark.parametrize("grid_type", ["A"]) # TODO also implement C-grid once available +@pytest.mark.parametrize("grid_type", ["A", "C"]) def test_stommelgyre_fieldset(method, rtol, grid_type): npart = 2 ds = stommel_gyre_dataset(grid_type=grid_type) grid = XGrid.from_dataset(ds) - U = Field("U", ds["U"], grid, interp_method=XLinear) - V = Field("V", ds["V"], grid, interp_method=XLinear) - P = Field("P", ds["P"], grid, interp_method=XLinear) - UV = VectorField("UV", U, V) + vector_interp_method = None if grid_type == "A" else CGrid_Velocity + tracer_interp_method = XLinear if grid_type == "A" else CGrid_Tracer + U = Field("U", ds["U"], grid) + V = Field("V", ds["V"], grid) + P = Field("P", ds["P"], grid, interp_method=tracer_interp_method) + UV = VectorField("UV", U, V, vector_interp_method=vector_interp_method) fieldset = FieldSet([U, V, P, UV]) dt = np.timedelta64(30, "m") From 50d9d52e0beaf06720e8ece10b76e6bfa64cd8df Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Fri, 15 Aug 2025 10:18:02 +0200 Subject: [PATCH 02/29] Moving C-grid velocity code from v3 to v4 --- parcels/application_kernels/interpolation.py | 156 +++++++++++++++++-- parcels/field.py | 2 +- parcels/tools/interpolation_utils.py | 22 +-- tests/v4/test_field.py | 2 +- 4 files changed, 155 insertions(+), 27 deletions(-) diff --git a/parcels/application_kernels/interpolation.py b/parcels/application_kernels/interpolation.py index ee70cea98..3e134104a 100644 --- a/parcels/application_kernels/interpolation.py +++ b/parcels/application_kernels/interpolation.py @@ -8,6 +8,8 @@ import numpy as np import xarray as xr +import parcels.tools.interpolation_utils as i_u + if TYPE_CHECKING: from parcels.field import Field, VectorField from parcels.uxgrid import _UXGRID_AXES @@ -54,6 +56,7 @@ def XLinear( axis_dim = field.grid.get_axis_dim_mapping(field.data.dims) data = field.data + tdim, zdim, ydim, xdim = data.shape[0], data.shape[1], data.shape[2], data.shape[3] lenT = 2 if np.any(tau > 0) else 1 lenZ = 2 if np.any(zeta > 0) else 1 @@ -62,22 +65,22 @@ def XLinear( if lenT == 1: ti = np.repeat(ti, lenZ * 4) else: - ti_1 = np.clip(ti + 1, 0, data.shape[0] - 1) + ti_1 = np.clip(ti + 1, 0, tdim - 1) ti = np.concatenate([np.repeat(ti, lenZ * 4), np.repeat(ti_1, lenZ * 4)]) # Depth coordinates: 4 points at zi, 4 at zi+1, repeated for both time levels if lenZ == 1: zi = np.repeat(zi, lenT * 4) else: - zi_1 = np.clip(zi + 1, 0, data.shape[1] - 1) + zi_1 = np.clip(zi + 1, 0, zdim - 1) zi = np.tile(np.array([zi, zi, zi, zi, zi_1, zi_1, zi_1, zi_1]).flatten(), lenT) # Y coordinates: [yi, yi, yi+1, yi+1] for each spatial point, repeated for time/depth - yi_1 = np.clip(yi + 1, 0, data.shape[2] - 1) + yi_1 = np.clip(yi + 1, 0, ydim - 1) yi = np.tile(np.repeat(np.column_stack([yi, yi_1]), 2), (lenT) * (lenZ)) # X coordinates: [xi, xi+1, xi, xi+1] for each spatial point, repeated for time/depth - xi_1 = np.clip(xi + 1, 0, data.shape[3] - 1) + xi_1 = np.clip(xi + 1, 0, xdim - 1) xi = np.tile(np.column_stack([xi, xi_1, xi, xi_1]).flatten(), (lenT) * (lenZ)) # Create DataArrays for indexing @@ -122,26 +125,151 @@ def CGrid_Velocity( z: np.float32 | np.float64, y: np.float32 | np.float64, x: 
np.float32 | np.float64, + applyConversion: bool, ): """ Interpolation kernel for velocity fields on a C-Grid. Following Delandmeter and Van Sebille (2019), velocity fields should be interpolated only in the direction of the grid cell faces. """ - # xi, xsi = position["X"] - # yi, eta = position["Y"] - # zi, zeta = position["Z"] + xi, xsi = position["X"] + yi, eta = position["Y"] + zi, zeta = position["Z"] - # U = vectorfield.U.data - # V = vectorfield.V.data + U = vectorfield.U.data + V = vectorfield.V.data + grid = vectorfield.grid + tdim, zdim, ydim, xdim = U.shape[0], U.shape[1], U.shape[2], U.shape[3] - # axis_dim = vectorfield.U.grid.get_axis_dim_mapping(U.dims) + if grid.lon.ndim == 1: + px = np.array([grid.lon[xi], grid.lon[xi + 1], grid.lon[xi + 1], grid.lon[xi]]) + py = np.array([grid.lat[yi], grid.lat[yi], grid.lat[yi + 1], grid.lat[yi + 1]]) + else: + px = np.array([grid.lon[yi, xi], grid.lon[yi, xi + 1], grid.lon[yi + 1, xi + 1], grid.lon[yi + 1, xi]]) + py = np.array([grid.lat[yi, xi], grid.lat[yi, xi + 1], grid.lat[yi + 1, xi + 1], grid.lat[yi + 1, xi]]) + + if grid.mesh == "spherical": + px[0] = px[0] + 360 if px[0] < x - 225 else px[0] + px[0] = px[0] - 360 if px[0] > x + 225 else px[0] + px[1:] = np.where(px[1:] - px[0] > 180, px[1:] - 360, px[1:]) + px[1:] = np.where(-px[1:] + px[0] > 180, px[1:] + 360, px[1:]) + xx = (1 - xsi) * (1 - eta) * px[0] + xsi * (1 - eta) * px[1] + xsi * eta * px[2] + (1 - xsi) * eta * px[3] + np.testing.assert_allclose(xx, x, atol=1e-4) + c1 = i_u._geodetic_distance(py[0], py[1], px[0], px[1], grid.mesh, np.dot(i_u.phi2D_lin(0.0, xsi), py)) + c2 = i_u._geodetic_distance(py[1], py[2], px[1], px[2], grid.mesh, np.dot(i_u.phi2D_lin(eta, 1.0), py)) + c3 = i_u._geodetic_distance(py[2], py[3], px[2], px[3], grid.mesh, np.dot(i_u.phi2D_lin(1.0, xsi), py)) + c4 = i_u._geodetic_distance(py[3], py[0], px[3], px[0], grid.mesh, np.dot(i_u.phi2D_lin(eta, 0.0), py)) - # lenT = 2 if np.any(tau > 0) else 1 - # lenZ = 2 if np.any(zeta > 0) else 1 + lenT = 2 if np.any(tau > 0) else 1 + lenZ = 2 if np.any(zeta > 0) else 1 + + # Create arrays of corner points for xarray.isel + # TODO C grid may not need all xi and yi cornerpoints, so could speed up here? 
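+    # (c1..c4 above are the geodetic lengths of the south, east, north and west
+    # cell edges; further below they scale the face-normal velocities into
+    # fluxes through the west/east faces (U0, U1) and south/north faces (V0, V1),
+    # which are interpolated linearly in xsi and eta respectively, following
+    # Delandmeter and Van Sebille (2019))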
- value = (0, 0, 0) - return value + # Time coordinates: 8 points at ti, then 8 points at ti+1 + if lenT == 1: + ti = np.repeat(ti, lenZ * 4) + else: + ti_1 = np.clip(ti + 1, 0, tdim - 1) + ti = np.concatenate([np.repeat(ti, lenZ * 4), np.repeat(ti_1, lenZ * 4)]) + + # Depth coordinates: 4 points at zi, 4 at zi+1, repeated for both time levels + if lenZ == 1: + zi = np.repeat(zi, lenT * 4) + else: + zi_1 = np.clip(zi + 1, 0, zdim - 1) + zi = np.tile(np.array([zi, zi, zi, zi, zi_1, zi_1, zi_1, zi_1]).flatten(), lenT) + + # Y coordinates: [yi, yi, yi+1, yi+1] for each spatial point, repeated for time/depth + yi_1 = np.clip(yi + 1, 0, ydim - 1) + yi = np.tile(np.repeat(np.column_stack([yi, yi_1]), 2), (lenT) * (lenZ)) + + # X coordinates: [xi, xi+1, xi, xi+1] for each spatial point, repeated for time/depth + xi_1 = np.clip(xi + 1, 0, xdim - 1) + xi = np.tile(np.column_stack([xi, xi_1, xi, xi_1]).flatten(), (lenT) * (lenZ)) + + for data in [U, V]: + axis_dim = grid.get_axis_dim_mapping(data.dims) + + # Create DataArrays for indexing + selection_dict = { + axis_dim["X"]: xr.DataArray(xi, dims=("points")), + axis_dim["Y"]: xr.DataArray(yi, dims=("points")), + } + if "Z" in axis_dim: + selection_dict[axis_dim["Z"]] = xr.DataArray(zi, dims=("points")) + if "time" in data.dims: + selection_dict["time"] = xr.DataArray(ti, dims=("points")) + + corner_data = data.isel(selection_dict).data.reshape(lenT, lenZ, len(xsi), 4) + + if lenT == 2: + tau = tau[np.newaxis, :, np.newaxis] + corner_data = corner_data[0, :, :, :] * (1 - tau) + corner_data[1, :, :, :] * tau + else: + corner_data = corner_data[0, :, :, :] + + if lenZ == 2: + zeta = zeta[:, np.newaxis] + corner_data = corner_data[0, :, :] * (1 - zeta) + corner_data[1, :, :] * zeta + else: + corner_data = corner_data[0, :, :] + # # See code below for v3 version + # # if self.gridindexingtype == "nemo": + # # U0 = self.U.data[ti, zi, yi + 1, xi] * c4 + # # U1 = self.U.data[ti, zi, yi + 1, xi + 1] * c2 + # # V0 = self.V.data[ti, zi, yi, xi + 1] * c1 + # # V1 = self.V.data[ti, zi, yi + 1, xi + 1] * c3 + # # elif self.gridindexingtype in ["mitgcm", "croco"]: + # # U0 = self.U.data[ti, zi, yi, xi] * c4 + # # U1 = self.U.data[ti, zi, yi, xi + 1] * c2 + # # V0 = self.V.data[ti, zi, yi, xi] * c1 + # # V1 = self.V.data[ti, zi, yi + 1, xi] * c3 + # # TODO Nick can you help use xgcm to fix this implementation? 
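+        # # (note: the two corner conventions below differ because the same cell
+        # # face carries a different index in NEMO than in MITgcm/CROCO; the
+        # # staggering conventions are offset by one index in each direction,
+        # # hence the shifted corners)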
+ + # # CROCO and MITgcm grid indexing, + # if data is U: + # U0 = corner_data[:, 0] * c4 + # U1 = corner_data[:, 1] * c2 + # elif data is V: + # V0 = corner_data[:, 0] * c1 + # V1 = corner_data[:, 2] * c3 + # # NEMO grid indexing + if data is U: + U0 = corner_data[:, 2] * c4 + U1 = corner_data[:, 3] * c2 + elif data is V: + V0 = corner_data[:, 1] * c1 + V1 = corner_data[:, 3] * c3 + + U = (1 - xsi) * U0 + xsi * U1 + V = (1 - eta) * V0 + eta * V1 + + deg2m = 1852 * 60.0 + if applyConversion: + meshJac = (deg2m * deg2m * np.cos(np.deg2rad(y))) if grid.mesh == "spherical" else 1 + else: + meshJac = deg2m if grid.mesh == "spherical" else 1 + + jac = i_u._compute_jacobian_determinant(py, px, eta, xsi) * meshJac + + u = ( + (-(1 - eta) * U - (1 - xsi) * V) * px[0] + + ((1 - eta) * U - xsi * V) * px[1] + + (eta * U + xsi * V) * px[2] + + (-eta * U + (1 - xsi) * V) * px[3] + ) / jac + v = ( + (-(1 - eta) * U - (1 - xsi) * V) * py[0] + + ((1 - eta) * U - xsi * V) * py[1] + + (eta * U + xsi * V) * py[2] + + (-eta * U + (1 - xsi) * V) * py[3] + ) / jac + if isinstance(u, dask.Array): + u = u.compute() + v = v.compute() + + return (u, v, 0) # TODO fix and test W also def CGrid_Tracer( diff --git a/parcels/field.py b/parcels/field.py index b59b6f61b..677816cdc 100644 --- a/parcels/field.py +++ b/parcels/field.py @@ -333,7 +333,7 @@ def eval(self, time: datetime, z, y, x, particle=None, applyConversion=True): if "3D" in self.vector_type: w = self.W._interp_method(self.W, ti, position, tau, time, z, y, x) else: - (u, v, w) = self._vector_interp_method(self, ti, position, tau, time, z, y, x) + (u, v, w) = self._vector_interp_method(self, ti, position, tau, time, z, y, x, applyConversion) if applyConversion: u = self.U.units.to_target(u, z, y, x) diff --git a/parcels/tools/interpolation_utils.py b/parcels/tools/interpolation_utils.py index 71d316e7c..85ad84e81 100644 --- a/parcels/tools/interpolation_utils.py +++ b/parcels/tools/interpolation_utils.py @@ -24,10 +24,10 @@ def phi1D_quad(xsi: float) -> list[float]: def phi2D_lin(eta: float, xsi: float) -> list[float]: - phi = [(1-xsi) * (1-eta), - xsi * (1-eta), - xsi * eta , - (1-xsi) * eta ] + phi = np.column_stack([(1-xsi) * (1-eta), + xsi * (1-eta), + xsi * eta , + (1-xsi) * eta ]) return phi @@ -185,12 +185,12 @@ def _geodetic_distance(lat1: float, lat2: float, lon1: float, lon2: float, mesh: def _compute_jacobian_determinant(py: np.ndarray, px: np.ndarray, eta: float, xsi: float) -> float: - dphidxsi = [eta - 1, 1 - eta, eta, -eta] - dphideta = [xsi - 1, -xsi, xsi, 1 - xsi] + dphidxsi = np.column_stack([eta - 1, 1 - eta, eta, -eta]) + dphideta = np.column_stack([xsi - 1, -xsi, xsi, 1 - xsi]) - dxdxsi = np.dot(px, dphidxsi) - dxdeta = np.dot(px, dphideta) - dydxsi = np.dot(py, dphidxsi) - dydeta = np.dot(py, dphideta) + dxdxsi = np.dot(dphidxsi, px) + dxdeta = np.dot(dphideta, px) + dydxsi = np.dot(dphidxsi, py) + dydeta = np.dot(dphideta, py) jac = dxdxsi * dydeta - dxdeta * dydxsi - return jac + return jac.trace() # TODO check how to properly vectorize this function (and not return only half of the Jacobian) diff --git a/tests/v4/test_field.py b/tests/v4/test_field.py index 6f1068896..91ab3df0f 100644 --- a/tests/v4/test_field.py +++ b/tests/v4/test_field.py @@ -133,7 +133,7 @@ def test_vectorfield_invalid_interpolator(): ds = datasets_structured["ds_2d_left"] grid = XGrid.from_dataset(ds) - def invalid_interpolator_wrong_signature(self, ti, position, tau, t, z, y, invalid): + def invalid_interpolator_wrong_signature(self, ti, position, tau, t, z, y, 
applyConversion, invalid):
        return 0.0

    # Create component fields

From 3226306ad416ae49cefe8cc0cbd19c683811b118 Mon Sep 17 00:00:00 2001
From: Erik van Sebille
Date: Fri, 15 Aug 2025 11:51:29 +0200
Subject: [PATCH 03/29] use time_interval type to set default time

---
 parcels/particleset.py | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/parcels/particleset.py b/parcels/particleset.py
index 871d151b3..828f85c5e 100644
--- a/parcels/particleset.py
+++ b/parcels/particleset.py
@@ -96,9 +96,11 @@ def __init__(
         assert lon.size == lat.size and lon.size == depth.size, "lon, lat, depth don't all have the same lengths"

         if time is None or len(time) == 0:
-            time = type(fieldset.U.data.time[0].values)(
-                "NaT", "ns"
-            )  # do not set a time yet (because sign_dt not known)
+            # do not set a time yet (because sign_dt not known)
+            if fieldset.time_interval is None:
+                time = np.timedelta64("NaT", "ns")
+            else:
+                time = type(fieldset.time_interval.left)("NaT", "ns")
         elif type(time[0]) in [np.datetime64, np.timedelta64]:
             pass  # already in the right format
         else:

From 1bd7e0c15a5ca073ecb2d40fb83ba47c2592c1b2 Mon Sep 17 00:00:00 2001
From: Erik van Sebille
Date: Fri, 15 Aug 2025 11:53:32 +0200
Subject: [PATCH 04/29] Adding nemo curvilinear test for C-grid

---
 tests/v4/test_advection.py | 36 ++++++++++++++++++++++++++++++++++++
 1 file changed, 36 insertions(+)

diff --git a/tests/v4/test_advection.py b/tests/v4/test_advection.py
index eb182e06b..320667776 100644
--- a/tests/v4/test_advection.py
+++ b/tests/v4/test_advection.py
@@ -406,3 +406,39 @@ def UpdateP(particle, fieldset, time):  # pragma: no cover
     pset = ParticleSet(fieldset, pclass=SampleParticle, lon=start_lon, lat=start_lat, time=np.timedelta64(0, "s"))
     pset.execute([kernel[method], UpdateP], dt=dt, runtime=runtime)
     np.testing.assert_allclose(pset.p, pset.p_start, rtol=rtol)
+
+
+def test_nemo_curvilinear_fieldset():
+    data_folder = parcels.download_example_dataset("NemoCurvilinear_data")
+    files = data_folder.glob("*.nc4")
+    ds = xr.open_mfdataset(files, combine="nested", data_vars="minimal", coords="minimal", compat="override")
+    ds = ds.isel(time_counter=0, drop=True).drop_vars({"time"}).rename({"glamf": "lon", "gphif": "lat", "z": "depth"})
+
+    xgcm_grid = parcels.xgcm.Grid(
+        ds,
+        coords={
+            "X": {"left": "x"},
+            "Y": {"left": "y"},
+        },
+        periodic=False,
+    )
+    grid = XGrid(xgcm_grid)
+
+    U = parcels.Field("U", ds["U"], grid)
+    V = parcels.Field("V", ds["V"], grid)
+    U.units = parcels.GeographicPolar()
+    V.units = parcels.Geographic()
+    UV = parcels.VectorField("UV", U, V, vector_interp_method=CGrid_Velocity)
+    fieldset = parcels.FieldSet([U, V, UV])
+
+    npart = 20
+    lonp = 30 * np.ones(npart)
+    latp = np.linspace(-70, 88, npart)
+    runtime = np.timedelta64(12, "h")  # TODO increase to 160 days
+
+    def periodicBC(particle, fieldset, time):  # pragma: no cover
+        particle.dlon = np.where(particle.lon > 180, particle.dlon - 360, particle.dlon)
+
+    pset = parcels.ParticleSet(fieldset, lon=lonp, lat=latp)
+    pset.execute([AdvectionRK4, periodicBC], runtime=runtime, dt=np.timedelta64(6, "h"))
+    np.testing.assert_allclose(pset.lat_nextloop, latp, atol=1e-1)

From 2da436a0d126502a2323f8e7c8dd18cee975cb73 Mon Sep 17 00:00:00 2001
From: Erik van Sebille
Date: Fri, 15 Aug 2025 17:15:52 +0200
Subject: [PATCH 05/29] Speeding up curvilinear search by dask loading lon and lat

---
 parcels/xgrid.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/parcels/xgrid.py b/parcels/xgrid.py
index bac507d8e..3453dd2db 100644
--- a/parcels/xgrid.py
+++ 
b/parcels/xgrid.py @@ -100,6 +100,10 @@ def __init__(self, grid: xgcm.Grid, mesh="flat"): self.mesh = mesh self._spatialhash = None ds = grid._ds + if hasattr(ds["lon"], "load"): + ds["lon"].load() + if hasattr(ds["lat"], "load"): + ds["lat"].load() if len(set(grid.axes) & {"X", "Y", "Z"}) > 0: # Only if spatial grid is >0D (see #2054 for further development) assert_valid_lat_lon(ds["lat"], ds["lon"], grid.axes) From 0e1a2210d328e7e090179d70503dae287d14c494 Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Fri, 15 Aug 2025 17:16:50 +0200 Subject: [PATCH 06/29] Updating c-grid velocity test and algorithm --- parcels/tools/interpolation_utils.py | 3 ++- tests/v4/test_advection.py | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/parcels/tools/interpolation_utils.py b/parcels/tools/interpolation_utils.py index 85ad84e81..f09aa839f 100644 --- a/parcels/tools/interpolation_utils.py +++ b/parcels/tools/interpolation_utils.py @@ -193,4 +193,5 @@ def _compute_jacobian_determinant(py: np.ndarray, px: np.ndarray, eta: float, xs dydxsi = np.dot(dphidxsi, py) dydeta = np.dot(dphideta, py) jac = dxdxsi * dydeta - dxdeta * dydxsi - return jac.trace() # TODO check how to properly vectorize this function (and not return only half of the Jacobian) + # TODO check how to properly vectorize this function (and not return only half of the Jacobian) + return jac.diagonal() diff --git a/tests/v4/test_advection.py b/tests/v4/test_advection.py index 320667776..f624a156e 100644 --- a/tests/v4/test_advection.py +++ b/tests/v4/test_advection.py @@ -440,5 +440,5 @@ def periodicBC(particle, fieldSet, time): # pragma: no cover particle.dlon = np.where(particle.lon > 180, particle.dlon - 360, particle.dlon) pset = parcels.ParticleSet(fieldset, lon=lonp, lat=latp) - pset.execute([AdvectionRK4, periodicBC], runtime=runtime, dt=np.timedelta64(6, "h")) + pset.execute([AdvectionEE, periodicBC], runtime=runtime, dt=np.timedelta64(6, "h")) np.testing.assert_allclose(pset.lat_nextloop, latp, atol=1e-1) From 1ecab22cc4df7e5871287ab4d757c8bdee3593aa Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Mon, 18 Aug 2025 13:37:56 +0200 Subject: [PATCH 07/29] Fixing vector interpolation Using correct mesh_type and conversion Also had to change the yi-indexing. To explore why! 
--- parcels/application_kernels/interpolation.py | 18 +++++++++--------- parcels/field.py | 16 +++++++++++----- parcels/tools/interpolation_utils.py | 2 +- tests/v4/test_advection.py | 4 ++-- 4 files changed, 23 insertions(+), 17 deletions(-) diff --git a/parcels/application_kernels/interpolation.py b/parcels/application_kernels/interpolation.py index 3e134104a..c46f98feb 100644 --- a/parcels/application_kernels/interpolation.py +++ b/parcels/application_kernels/interpolation.py @@ -148,17 +148,17 @@ def CGrid_Velocity( px = np.array([grid.lon[yi, xi], grid.lon[yi, xi + 1], grid.lon[yi + 1, xi + 1], grid.lon[yi + 1, xi]]) py = np.array([grid.lat[yi, xi], grid.lat[yi, xi + 1], grid.lat[yi + 1, xi + 1], grid.lat[yi + 1, xi]]) - if grid.mesh == "spherical": + if vectorfield._mesh_type == "spherical": px[0] = px[0] + 360 if px[0] < x - 225 else px[0] px[0] = px[0] - 360 if px[0] > x + 225 else px[0] px[1:] = np.where(px[1:] - px[0] > 180, px[1:] - 360, px[1:]) px[1:] = np.where(-px[1:] + px[0] > 180, px[1:] + 360, px[1:]) xx = (1 - xsi) * (1 - eta) * px[0] + xsi * (1 - eta) * px[1] + xsi * eta * px[2] + (1 - xsi) * eta * px[3] np.testing.assert_allclose(xx, x, atol=1e-4) - c1 = i_u._geodetic_distance(py[0], py[1], px[0], px[1], grid.mesh, np.dot(i_u.phi2D_lin(0.0, xsi), py)) - c2 = i_u._geodetic_distance(py[1], py[2], px[1], px[2], grid.mesh, np.dot(i_u.phi2D_lin(eta, 1.0), py)) - c3 = i_u._geodetic_distance(py[2], py[3], px[2], px[3], grid.mesh, np.dot(i_u.phi2D_lin(1.0, xsi), py)) - c4 = i_u._geodetic_distance(py[3], py[0], px[3], px[0], grid.mesh, np.dot(i_u.phi2D_lin(eta, 0.0), py)) + c1 = i_u._geodetic_distance(py[0], py[1], px[0], px[1], vectorfield._mesh_type, np.dot(i_u.phi2D_lin(0.0, xsi), py)) + c2 = i_u._geodetic_distance(py[1], py[2], px[1], px[2], vectorfield._mesh_type, np.dot(i_u.phi2D_lin(eta, 1.0), py)) + c3 = i_u._geodetic_distance(py[2], py[3], px[2], px[3], vectorfield._mesh_type, np.dot(i_u.phi2D_lin(1.0, xsi), py)) + c4 = i_u._geodetic_distance(py[3], py[0], px[3], px[0], vectorfield._mesh_type, np.dot(i_u.phi2D_lin(eta, 0.0), py)) lenT = 2 if np.any(tau > 0) else 1 lenZ = 2 if np.any(zeta > 0) else 1 @@ -181,8 +181,8 @@ def CGrid_Velocity( zi = np.tile(np.array([zi, zi, zi, zi, zi_1, zi_1, zi_1, zi_1]).flatten(), lenT) # Y coordinates: [yi, yi, yi+1, yi+1] for each spatial point, repeated for time/depth - yi_1 = np.clip(yi + 1, 0, ydim - 1) - yi = np.tile(np.repeat(np.column_stack([yi, yi_1]), 2), (lenT) * (lenZ)) + yi_minus_1 = np.clip(yi - 1, 0, ydim - 1) # TODO check why minus here!!! 
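+    # (with the minus, the four corners gathered below span rows [yi-1, yi]
+    # rather than [yi, yi+1])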
+ yi = np.tile(np.repeat(np.column_stack([yi_minus_1, yi]), 2), (lenT) * (lenZ)) # X coordinates: [xi, xi+1, xi, xi+1] for each spatial point, repeated for time/depth xi_1 = np.clip(xi + 1, 0, xdim - 1) @@ -247,9 +247,9 @@ def CGrid_Velocity( deg2m = 1852 * 60.0 if applyConversion: - meshJac = (deg2m * deg2m * np.cos(np.deg2rad(y))) if grid.mesh == "spherical" else 1 + meshJac = (deg2m * deg2m * np.cos(np.deg2rad(y))) if vectorfield._mesh_type == "spherical" else 1 else: - meshJac = deg2m if grid.mesh == "spherical" else 1 + meshJac = deg2m if vectorfield._mesh_type == "spherical" else 1 jac = i_u._compute_jacobian_determinant(py, px, eta, xsi) * meshJac diff --git a/parcels/field.py b/parcels/field.py index 677816cdc..26b1857c1 100644 --- a/parcels/field.py +++ b/parcels/field.py @@ -295,6 +295,10 @@ def __init__( _assert_same_function_signature(vector_interp_method, ref=CGrid_Velocity) self._vector_interp_method = vector_interp_method + if U._mesh_type != V._mesh_type or (W and U._mesh_type != W._mesh_type): + raise ValueError(f"Inconsistent mesh types: {U._mesh_type}, {V._mesh_type}, {W._mesh_type}") + self._mesh_type = U._mesh_type + def __repr__(self): return f"""<{type(self).__name__}> name: {self.name!r} @@ -332,14 +336,16 @@ def eval(self, time: datetime, z, y, x, particle=None, applyConversion=True): v = self.V._interp_method(self.V, ti, position, tau, time, z, y, x) if "3D" in self.vector_type: w = self.W._interp_method(self.W, ti, position, tau, time, z, y, x) + + if applyConversion: + u = self.U.units.to_target(u, z, y, x) + v = self.V.units.to_target(v, z, y, x) + else: (u, v, w) = self._vector_interp_method(self, ti, position, tau, time, z, y, x, applyConversion) - if applyConversion: - u = self.U.units.to_target(u, z, y, x) - v = self.V.units.to_target(v, z, y, x) - if "3D" in self.vector_type: - w = self.W.units.to_target(w, z, y, x) if self.W else 0.0 + if applyConversion and ("3D" in self.vector_type): + w = self.W.units.to_target(w, z, y, x) if self.W else 0.0 if "3D" in self.vector_type: return (u, v, w) diff --git a/parcels/tools/interpolation_utils.py b/parcels/tools/interpolation_utils.py index f09aa839f..10eb169a9 100644 --- a/parcels/tools/interpolation_utils.py +++ b/parcels/tools/interpolation_utils.py @@ -193,5 +193,5 @@ def _compute_jacobian_determinant(py: np.ndarray, px: np.ndarray, eta: float, xs dydxsi = np.dot(dphidxsi, py) dydeta = np.dot(dphideta, py) jac = dxdxsi * dydeta - dxdeta * dydxsi - # TODO check how to properly vectorize this function (and not return only half of the Jacobian) + # TODO check how to properly vectorize this function (ok to return diagonal of the Jacobian?) 
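+    # (np.dot of the (n, 4) shape-function derivatives with the (4, n) corner
+    # coordinates gives an (n, n) matrix, of which only the per-particle
+    # diagonal is needed, hence the .diagonal() below)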
return jac.diagonal() diff --git a/tests/v4/test_advection.py b/tests/v4/test_advection.py index f624a156e..c82ffd565 100644 --- a/tests/v4/test_advection.py +++ b/tests/v4/test_advection.py @@ -424,8 +424,8 @@ def test_nemo_curvilinear_fieldset(): ) grid = XGrid(xgcm_grid) - U = parcels.Field("U", ds["U"], grid) - V = parcels.Field("V", ds["V"], grid) + U = parcels.Field("U", ds["U"], grid, mesh_type="spherical") + V = parcels.Field("V", ds["V"], grid, mesh_type="spherical") U.units = parcels.GeographicPolar() V.units = parcels.Geographic() UV = parcels.VectorField("UV", U, V, vector_interp_method=CGrid_Velocity) From 8b1b830a2e52ae070c1420f28e09cb9917ae164b Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Mon, 18 Aug 2025 15:59:41 +0200 Subject: [PATCH 08/29] Fixing CGrid_Velocity interpolation for multiple particles Using vectorized version of dot-product calculation as suggested by @michaeldenes in https://github.com/OceanParcels/Parcels/pull/2152#discussion_r2282456826 --- parcels/application_kernels/interpolation.py | 20 ++++++++++++++------ parcels/tools/interpolation_utils.py | 17 ++++++++--------- 2 files changed, 22 insertions(+), 15 deletions(-) diff --git a/parcels/application_kernels/interpolation.py b/parcels/application_kernels/interpolation.py index c46f98feb..12b4f9b5f 100644 --- a/parcels/application_kernels/interpolation.py +++ b/parcels/application_kernels/interpolation.py @@ -149,16 +149,24 @@ def CGrid_Velocity( py = np.array([grid.lat[yi, xi], grid.lat[yi, xi + 1], grid.lat[yi + 1, xi + 1], grid.lat[yi + 1, xi]]) if vectorfield._mesh_type == "spherical": - px[0] = px[0] + 360 if px[0] < x - 225 else px[0] - px[0] = px[0] - 360 if px[0] > x + 225 else px[0] + px[0] = np.where(px[0] < x - 225, px[0] + 360, px[0]) + px[0] = np.where(px[0] > x + 225, px[0] - 360, px[0]) px[1:] = np.where(px[1:] - px[0] > 180, px[1:] - 360, px[1:]) px[1:] = np.where(-px[1:] + px[0] > 180, px[1:] + 360, px[1:]) xx = (1 - xsi) * (1 - eta) * px[0] + xsi * (1 - eta) * px[1] + xsi * eta * px[2] + (1 - xsi) * eta * px[3] np.testing.assert_allclose(xx, x, atol=1e-4) - c1 = i_u._geodetic_distance(py[0], py[1], px[0], px[1], vectorfield._mesh_type, np.dot(i_u.phi2D_lin(0.0, xsi), py)) - c2 = i_u._geodetic_distance(py[1], py[2], px[1], px[2], vectorfield._mesh_type, np.dot(i_u.phi2D_lin(eta, 1.0), py)) - c3 = i_u._geodetic_distance(py[2], py[3], px[2], px[3], vectorfield._mesh_type, np.dot(i_u.phi2D_lin(1.0, xsi), py)) - c4 = i_u._geodetic_distance(py[3], py[0], px[3], px[0], vectorfield._mesh_type, np.dot(i_u.phi2D_lin(eta, 0.0), py)) + c1 = i_u._geodetic_distance( + py[0], py[1], px[0], px[1], vectorfield._mesh_type, np.einsum("ij,ji->i", i_u.phi2D_lin(0.0, xsi), py) + ) + c2 = i_u._geodetic_distance( + py[1], py[2], px[1], px[2], vectorfield._mesh_type, np.einsum("ij,ji->i", i_u.phi2D_lin(eta, 1.0), py) + ) + c3 = i_u._geodetic_distance( + py[2], py[3], px[2], px[3], vectorfield._mesh_type, np.einsum("ij,ji->i", i_u.phi2D_lin(1.0, xsi), py) + ) + c4 = i_u._geodetic_distance( + py[3], py[0], px[3], px[0], vectorfield._mesh_type, np.einsum("ij,ji->i", i_u.phi2D_lin(eta, 0.0), py) + ) lenT = 2 if np.any(tau > 0) else 1 lenZ = 2 if np.any(zeta > 0) else 1 diff --git a/parcels/tools/interpolation_utils.py b/parcels/tools/interpolation_utils.py index 10eb169a9..e4893a813 100644 --- a/parcels/tools/interpolation_utils.py +++ b/parcels/tools/interpolation_utils.py @@ -1,4 +1,3 @@ -import math from collections.abc import Callable from typing import Literal @@ -179,7 +178,7 @@ def 
_geodetic_distance(lat1: float, lat2: float, lon1: float, lon2: float, mesh: if mesh == "spherical": rad = np.pi / 180.0 deg2m = 1852 * 60.0 - return np.sqrt(((lon2 - lon1) * deg2m * math.cos(rad * lat)) ** 2 + ((lat2 - lat1) * deg2m) ** 2) + return np.sqrt(((lon2 - lon1) * deg2m * np.cos(rad * lat)) ** 2 + ((lat2 - lat1) * deg2m) ** 2) else: return np.sqrt((lon2 - lon1) ** 2 + (lat2 - lat1) ** 2) @@ -188,10 +187,10 @@ def _compute_jacobian_determinant(py: np.ndarray, px: np.ndarray, eta: float, xs dphidxsi = np.column_stack([eta - 1, 1 - eta, eta, -eta]) dphideta = np.column_stack([xsi - 1, -xsi, xsi, 1 - xsi]) - dxdxsi = np.dot(dphidxsi, px) - dxdeta = np.dot(dphideta, px) - dydxsi = np.dot(dphidxsi, py) - dydeta = np.dot(dphideta, py) - jac = dxdxsi * dydeta - dxdeta * dydxsi - # TODO check how to properly vectorize this function (ok to return diagonal of the Jacobian?) - return jac.diagonal() + dxdxsi_diag = np.einsum("ij,ji->i", dphidxsi, px) + dxdeta_diag = np.einsum("ij,ji->i", dphideta, px) + dydxsi_diag = np.einsum("ij,ji->i", dphidxsi, py) + dydeta_diag = np.einsum("ij,ji->i", dphideta, py) + + jac_diag = dxdxsi_diag * dydeta_diag - dxdeta_diag * dydxsi_diag + return jac_diag From cad34e0354053828a79bfa043302f75e69f92933 Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Mon, 18 Aug 2025 17:27:35 +0200 Subject: [PATCH 09/29] Adding better error message handling for CGrid_velocity interpolation --- parcels/application_kernels/interpolation.py | 6 ++++-- parcels/field.py | 14 +++++++++++--- 2 files changed, 15 insertions(+), 5 deletions(-) diff --git a/parcels/application_kernels/interpolation.py b/parcels/application_kernels/interpolation.py index 12b4f9b5f..1208b721e 100644 --- a/parcels/application_kernels/interpolation.py +++ b/parcels/application_kernels/interpolation.py @@ -153,8 +153,6 @@ def CGrid_Velocity( px[0] = np.where(px[0] > x + 225, px[0] - 360, px[0]) px[1:] = np.where(px[1:] - px[0] > 180, px[1:] - 360, px[1:]) px[1:] = np.where(-px[1:] + px[0] > 180, px[1:] + 360, px[1:]) - xx = (1 - xsi) * (1 - eta) * px[0] + xsi * (1 - eta) * px[1] + xsi * eta * px[2] + (1 - xsi) * eta * px[3] - np.testing.assert_allclose(xx, x, atol=1e-4) c1 = i_u._geodetic_distance( py[0], py[1], px[0], px[1], vectorfield._mesh_type, np.einsum("ij,ji->i", i_u.phi2D_lin(0.0, xsi), py) ) @@ -277,6 +275,10 @@ def CGrid_Velocity( u = u.compute() v = v.compute() + # check whether the grid conversion has been applied correctly + xx = (1 - xsi) * (1 - eta) * px[0] + xsi * (1 - eta) * px[1] + xsi * eta * px[2] + (1 - xsi) * eta * px[3] + u = np.where(np.abs(xx - x) > 1e-4, np.nan, u) + return (u, v, 0) # TODO fix and test W also diff --git a/parcels/field.py b/parcels/field.py index 26b1857c1..6a5a97b99 100644 --- a/parcels/field.py +++ b/parcels/field.py @@ -245,7 +245,7 @@ def eval(self, time: datetime, z, y, x, particle=None, applyConversion=True): tau, ti = _search_time_index(self, time) position = self.grid.search(z, y, x, ei=_ei) - _update_particle_states(particle, position) + _update_particle_states_position(particle, position) value = self._interp_method(self, ti, position, tau, time, z, y, x) @@ -329,7 +329,7 @@ def eval(self, time: datetime, z, y, x, particle=None, applyConversion=True): tau, ti = _search_time_index(self.U, time) position = self.grid.search(z, y, x, ei=_ei) - _update_particle_states(particle, position) + _update_particle_states_position(particle, position) if self._vector_interp_method is None: u = self.U._interp_method(self.U, ti, position, tau, time, z, y, x) @@ -344,6 
+344,8 @@ def eval(self, time: datetime, z, y, x, particle=None, applyConversion=True): else: (u, v, w) = self._vector_interp_method(self, ti, position, tau, time, z, y, x, applyConversion) + _update_particle_states_velocity(particle, {"U": u}) + if applyConversion and ("3D" in self.vector_type): w = self.W.units.to_target(w, z, y, x) if self.W else 0.0 @@ -362,7 +364,7 @@ def __getitem__(self, key): return _deal_with_errors(error, key, vector_type=self.vector_type) -def _update_particle_states(particle, position): +def _update_particle_states_position(particle, position): """Update the particle states based on the position dictionary.""" if particle and "X" in position: # TODO also support uxgrid search particle.state = np.where(position["X"][0] < 0, StatusCode.ErrorOutOfBounds, particle.state) @@ -373,6 +375,12 @@ def _update_particle_states(particle, position): ) +def _update_particle_states_velocity(particle, velocities): + """Update the particle states based on the velocity dictionary.""" + if particle and "U" in velocities: + particle.state = np.where(np.isnan(velocities["U"]), StatusCode.Error, particle.state) + + def _assert_valid_uxdataarray(data: ux.UxDataArray): """Verifies that all the required attributes are present in the xarray.DataArray or uxarray.UxDataArray object. From 26cffcc1e7b11d71794203937047d8edc8bee10e Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Mon, 18 Aug 2025 17:28:51 +0200 Subject: [PATCH 10/29] Adding warning suppression for index_search Since np.where also calculates the parts that are not needed, these often throw a warning --- parcels/_index_search.py | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/parcels/_index_search.py b/parcels/_index_search.py index f95c215b0..9187b12f9 100644 --- a/parcels/_index_search.py +++ b/parcels/_index_search.py @@ -293,14 +293,16 @@ def _search_indices_curvilinear_2d( cc = a[1] * b[0] - a[0] * b[1] + x * b[1] - y * a[1] det2 = bb * bb - 4 * aa * cc - det = np.where(det2 > 0, np.sqrt(det2), eta) - eta = np.where(abs(aa) < 1e-12, -cc / bb, np.where(det2 > 0, (-bb + det) / (2 * aa), eta)) + with np.errstate(divide="ignore", invalid="ignore"): + det = np.where(det2 > 0, np.sqrt(det2), eta) - xsi = np.where( - abs(a[1] + a[3] * eta) < 1e-12, - ((y - py[0]) / (py[1] - py[0]) + (y - py[3]) / (py[2] - py[3])) * 0.5, - (x - a[0] - a[2] * eta) / (a[1] + a[3] * eta), - ) + eta = np.where(abs(aa) < 1e-12, -cc / bb, np.where(det2 > 0, (-bb + det) / (2 * aa), eta)) + + xsi = np.where( + abs(a[1] + a[3] * eta) < 1e-12, + ((y - py[0]) / (py[1] - py[0]) + (y - py[3]) / (py[2] - py[3])) * 0.5, + (x - a[0] - a[2] * eta) / (a[1] + a[3] * eta), + ) xi = np.where(xsi < -tol, xi - 1, np.where(xsi > 1 + tol, xi + 1, xi)) yi = np.where(eta < -tol, yi - 1, np.where(eta > 1 + tol, yi + 1, yi)) From e2376e20101827a749ca92adedf6df43923b99c6 Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Mon, 18 Aug 2025 17:31:41 +0200 Subject: [PATCH 11/29] Fixing error when Grid does not have lon or lat --- parcels/xgrid.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/parcels/xgrid.py b/parcels/xgrid.py index 3453dd2db..03f0916b2 100644 --- a/parcels/xgrid.py +++ b/parcels/xgrid.py @@ -100,9 +100,9 @@ def __init__(self, grid: xgcm.Grid, mesh="flat"): self.mesh = mesh self._spatialhash = None ds = grid._ds - if hasattr(ds["lon"], "load"): + if "lon" in ds and hasattr(ds["lon"], "load"): ds["lon"].load() - if hasattr(ds["lat"], "load"): + if "lat" in ds and hasattr(ds["lat"], "load"): 
ds["lat"].load() if len(set(grid.axes) & {"X", "Y", "Z"}) > 0: # Only if spatial grid is >0D (see #2054 for further development) From 6529e132bee078e1309ae29b13f9fc05390e1d07 Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Mon, 18 Aug 2025 17:51:53 +0200 Subject: [PATCH 12/29] Fixing to keep the maximum Error code in field --- parcels/field.py | 24 +++++++++++++++++------- 1 file changed, 17 insertions(+), 7 deletions(-) diff --git a/parcels/field.py b/parcels/field.py index 6a5a97b99..85c3f0387 100644 --- a/parcels/field.py +++ b/parcels/field.py @@ -367,18 +367,28 @@ def __getitem__(self, key): def _update_particle_states_position(particle, position): """Update the particle states based on the position dictionary.""" if particle and "X" in position: # TODO also support uxgrid search - particle.state = np.where(position["X"][0] < 0, StatusCode.ErrorOutOfBounds, particle.state) - particle.state = np.where(position["Y"][0] < 0, StatusCode.ErrorOutOfBounds, particle.state) - particle.state = np.where(position["Z"][0] == RIGHT_OUT_OF_BOUNDS, StatusCode.ErrorOutOfBounds, particle.state) - particle.state = np.where( - position["Z"][0] == LEFT_OUT_OF_BOUNDS, StatusCode.ErrorThroughSurface, particle.state + particle.state = np.maximum( + np.where(position["X"][0] < 0, StatusCode.ErrorOutOfBounds, particle.state), particle.state + ) + particle.state = np.maximum( + np.where(position["Y"][0] < 0, StatusCode.ErrorOutOfBounds, particle.state), particle.state + ) + particle.state = np.maximum( + np.where(position["Z"][0] == RIGHT_OUT_OF_BOUNDS, StatusCode.ErrorOutOfBounds, particle.state), + particle.state, + ) + particle.state = np.maximum( + np.where(position["Z"][0] == LEFT_OUT_OF_BOUNDS, StatusCode.ErrorThroughSurface, particle.state), + particle.state, ) def _update_particle_states_velocity(particle, velocities): - """Update the particle states based on the velocity dictionary.""" + """Update the particle states based on the velocity dictionary, but only if state is not an Error already.""" if particle and "U" in velocities: - particle.state = np.where(np.isnan(velocities["U"]), StatusCode.Error, particle.state) + particle.state = np.maximum( + np.where(np.isnan(velocities["U"]), StatusCode.Error, particle.state), particle.state + ) def _assert_valid_uxdataarray(data: ux.UxDataArray): From dda469d2d153975fae88947036059f5d2862bb7b Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Thu, 21 Aug 2025 15:26:39 +0200 Subject: [PATCH 13/29] Adding NEMO3D test --- parcels/application_kernels/interpolation.py | 8 +-- tests/v4/test_advection.py | 57 ++++++++++++++++++++ 2 files changed, 61 insertions(+), 4 deletions(-) diff --git a/parcels/application_kernels/interpolation.py b/parcels/application_kernels/interpolation.py index 1208b721e..3208a60a8 100644 --- a/parcels/application_kernels/interpolation.py +++ b/parcels/application_kernels/interpolation.py @@ -210,14 +210,14 @@ def CGrid_Velocity( corner_data = data.isel(selection_dict).data.reshape(lenT, lenZ, len(xsi), 4) if lenT == 2: - tau = tau[np.newaxis, :, np.newaxis] - corner_data = corner_data[0, :, :, :] * (1 - tau) + corner_data[1, :, :, :] * tau + tau_full = tau[np.newaxis, :, np.newaxis] + corner_data = corner_data[0, :, :, :] * (1 - tau_full) + corner_data[1, :, :, :] * tau_full else: corner_data = corner_data[0, :, :, :] if lenZ == 2: - zeta = zeta[:, np.newaxis] - corner_data = corner_data[0, :, :] * (1 - zeta) + corner_data[1, :, :] * zeta + zeta_full = zeta[:, np.newaxis] + corner_data = corner_data[0, :, :] * (1 - zeta_full) + 
corner_data[1, :, :] * zeta_full else: corner_data = corner_data[0, :, :] # # See code below for v3 version diff --git a/tests/v4/test_advection.py b/tests/v4/test_advection.py index c82ffd565..551206502 100644 --- a/tests/v4/test_advection.py +++ b/tests/v4/test_advection.py @@ -442,3 +442,60 @@ def periodicBC(particle, fieldSet, time): # pragma: no cover pset = parcels.ParticleSet(fieldset, lon=lonp, lat=latp) pset.execute([AdvectionEE, periodicBC], runtime=runtime, dt=np.timedelta64(6, "h")) np.testing.assert_allclose(pset.lat_nextloop, latp, atol=1e-1) + + +def test_nemo_3D_curvilinear_fieldset(): + download_dir = parcels.download_example_dataset("NemoNorthSeaORCA025-N006_data") + ufiles = download_dir.glob("*U.nc") + dsu = xr.open_mfdataset(ufiles, decode_times=False, drop_variables=["nav_lat", "nav_lon"]) + dsu = dsu.rename({"time_counter": "time", "uo": "U"}) + + vfiles = download_dir.glob("*V.nc") + dsv = xr.open_mfdataset(vfiles, decode_times=False, drop_variables=["nav_lat", "nav_lon"]) + dsv = dsv.rename({"time_counter": "time", "vo": "V"}) + + wfiles = download_dir.glob("*W.nc") + dsw = xr.open_mfdataset(wfiles, decode_times=False, drop_variables=["nav_lat", "nav_lon"]) + dsw = dsw.rename({"time_counter": "time", "depthw": "depth", "wo": "W"}) + + dsu = dsu.assign_coords(depthu=dsw.depth.values) + dsu = dsu.rename({"depthu": "depth"}) + + dsv = dsv.assign_coords(depthv=dsw.depth.values) + dsv = dsv.rename({"depthv": "depth"}) + + coord_file = f"{download_dir}/coordinates.nc" + dscoord = xr.open_dataset(coord_file, decode_times=False).rename({"glamf": "lon", "gphif": "lat"}) + + ds = xr.merge([dsu, dsv, dsw, dscoord]) + + ds.load() # TODO remove for debug + + xgcm_grid = parcels.xgcm.Grid( + ds, + coords={ + "X": {"left": "x"}, + "Y": {"left": "y"}, + "Z": {"left": "depth"}, + "T": {"center": "time"}, + }, + periodic=False, + ) + grid = XGrid(xgcm_grid) + + U = parcels.Field("U", ds["U"], grid, mesh_type="spherical") + V = parcels.Field("V", ds["V"], grid, mesh_type="spherical") + W = parcels.Field("W", ds["W"], grid, mesh_type="spherical") + U.units = parcels.GeographicPolar() + V.units = parcels.Geographic() + W.units = parcels.Geographic() + UV = parcels.VectorField("UV", U, V, vector_interp_method=CGrid_Velocity) # TODO remove + UVW = parcels.VectorField("UVW", U, V, W, vector_interp_method=CGrid_Velocity) + fieldset = parcels.FieldSet([U, V, W, UV, UVW]) + + lons = np.linspace(1.9, 3.4, 10) + lats = np.linspace(52.5, 51.6, 10) + pset = parcels.ParticleSet(fieldset, lon=lons, lat=lats, depth=np.ones_like(lons)) + + pset.execute(parcels.AdvectionRK4, runtime=np.timedelta64(4, "D"), dt=np.timedelta64(6, "h")) + print(pset.depth) From 76d015bc678c7d89ae4e0f56d54c334027b1816a Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Fri, 22 Aug 2025 09:03:02 +0200 Subject: [PATCH 14/29] Updating CGrid interpolation to not interpolate over depth for U and V --- parcels/application_kernels/interpolation.py | 36 ++++++++------------ 1 file changed, 14 insertions(+), 22 deletions(-) diff --git a/parcels/application_kernels/interpolation.py b/parcels/application_kernels/interpolation.py index 3208a60a8..e4acdbae2 100644 --- a/parcels/application_kernels/interpolation.py +++ b/parcels/application_kernels/interpolation.py @@ -139,7 +139,7 @@ def CGrid_Velocity( U = vectorfield.U.data V = vectorfield.V.data grid = vectorfield.grid - tdim, zdim, ydim, xdim = U.shape[0], U.shape[1], U.shape[2], U.shape[3] + tdim, ydim, xdim = U.shape[0], U.shape[2], U.shape[3] if grid.lon.ndim == 1: px = 
np.array([grid.lon[xi], grid.lon[xi + 1], grid.lon[xi + 1], grid.lon[xi]]) @@ -167,32 +167,30 @@ def CGrid_Velocity( ) lenT = 2 if np.any(tau > 0) else 1 - lenZ = 2 if np.any(zeta > 0) else 1 # Create arrays of corner points for xarray.isel # TODO C grid may not need all xi and yi cornerpoints, so could speed up here? # Time coordinates: 8 points at ti, then 8 points at ti+1 if lenT == 1: - ti = np.repeat(ti, lenZ * 4) + ti = np.repeat(ti, 4) else: ti_1 = np.clip(ti + 1, 0, tdim - 1) - ti = np.concatenate([np.repeat(ti, lenZ * 4), np.repeat(ti_1, lenZ * 4)]) + ti = np.concatenate([np.repeat(ti, 4), np.repeat(ti_1, 4)]) - # Depth coordinates: 4 points at zi, 4 at zi+1, repeated for both time levels - if lenZ == 1: - zi = np.repeat(zi, lenT * 4) - else: - zi_1 = np.clip(zi + 1, 0, zdim - 1) - zi = np.tile(np.array([zi, zi, zi, zi, zi_1, zi_1, zi_1, zi_1]).flatten(), lenT) + # Depth coordinates: 4 points at zi, repeated for both time levels + zi = np.repeat(zi, lenT * 4) # Y coordinates: [yi, yi, yi+1, yi+1] for each spatial point, repeated for time/depth - yi_minus_1 = np.clip(yi - 1, 0, ydim - 1) # TODO check why minus here!!! - yi = np.tile(np.repeat(np.column_stack([yi_minus_1, yi]), 2), (lenT) * (lenZ)) + yi_1 = np.clip(yi + 1, 0, ydim - 1) + yi = np.tile(np.repeat(np.column_stack([yi, yi_1]), 2), (lenT)) + # # TODO check why in some cases minus needed here!!! + # yi_minus_1 = np.clip(yi - 1, 0, ydim - 1) + # yi = np.tile(np.repeat(np.column_stack([yi_minus_1, yi]), 2), (lenT)) # X coordinates: [xi, xi+1, xi, xi+1] for each spatial point, repeated for time/depth xi_1 = np.clip(xi + 1, 0, xdim - 1) - xi = np.tile(np.column_stack([xi, xi_1, xi, xi_1]).flatten(), (lenT) * (lenZ)) + xi = np.tile(np.column_stack([xi, xi_1, xi, xi_1]).flatten(), (lenT)) for data in [U, V]: axis_dim = grid.get_axis_dim_mapping(data.dims) @@ -207,17 +205,11 @@ def CGrid_Velocity( if "time" in data.dims: selection_dict["time"] = xr.DataArray(ti, dims=("points")) - corner_data = data.isel(selection_dict).data.reshape(lenT, lenZ, len(xsi), 4) + corner_data = data.isel(selection_dict).data.reshape(lenT, len(xsi), 4) if lenT == 2: - tau_full = tau[np.newaxis, :, np.newaxis] - corner_data = corner_data[0, :, :, :] * (1 - tau_full) + corner_data[1, :, :, :] * tau_full - else: - corner_data = corner_data[0, :, :, :] - - if lenZ == 2: - zeta_full = zeta[:, np.newaxis] - corner_data = corner_data[0, :, :] * (1 - zeta_full) + corner_data[1, :, :] * zeta_full + tau_full = tau[:, np.newaxis] + corner_data = corner_data[0, :, :] * (1 - tau_full) + corner_data[1, :, :] * tau_full else: corner_data = corner_data[0, :, :] # # See code below for v3 version From 196c6e76f7bf305af38ec9850ac238337bbb6c13 Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Fri, 22 Aug 2025 09:56:17 +0200 Subject: [PATCH 15/29] Fixing W interpolation for CGrid --- parcels/application_kernels/interpolation.py | 70 ++++++++++++++++---- tests/v4/test_advection.py | 49 +++++++++++--- 2 files changed, 99 insertions(+), 20 deletions(-) diff --git a/parcels/application_kernels/interpolation.py b/parcels/application_kernels/interpolation.py index e4acdbae2..da07cfd1b 100644 --- a/parcels/application_kernels/interpolation.py +++ b/parcels/application_kernels/interpolation.py @@ -139,7 +139,7 @@ def CGrid_Velocity( U = vectorfield.U.data V = vectorfield.V.data grid = vectorfield.grid - tdim, ydim, xdim = U.shape[0], U.shape[2], U.shape[3] + tdim, zdim, ydim, xdim = U.shape[0], U.shape[1], U.shape[2], U.shape[3] if grid.lon.ndim == 1: px = 
np.array([grid.lon[xi], grid.lon[xi + 1], grid.lon[xi + 1], grid.lon[xi]])
@@ -171,39 +171,39 @@ def CGrid_Velocity(
     # Create arrays of corner points for xarray.isel
     # TODO C grid may not need all xi and yi cornerpoints, so could speed up here?

-    # Time coordinates: 8 points at ti, then 8 points at ti+1
+    # Time coordinates: 4 points at ti, then 4 points at ti+1
     if lenT == 1:
-        ti = np.repeat(ti, 4)
+        ti_full = np.repeat(ti, 4)
     else:
         ti_1 = np.clip(ti + 1, 0, tdim - 1)
-        ti = np.concatenate([np.repeat(ti, 4), np.repeat(ti_1, 4)])
+        ti_full = np.concatenate([np.repeat(ti, 4), np.repeat(ti_1, 4)])

     # Depth coordinates: 4 points at zi, repeated for both time levels
-    zi = np.repeat(zi, lenT * 4)
+    zi_full = np.repeat(zi, lenT * 4)

     # Y coordinates: [yi, yi, yi+1, yi+1] for each spatial point, repeated for time/depth
     yi_1 = np.clip(yi + 1, 0, ydim - 1)
-    yi = np.tile(np.repeat(np.column_stack([yi, yi_1]), 2), (lenT))
+    yi_full = np.tile(np.repeat(np.column_stack([yi, yi_1]), 2), (lenT))
     # # TODO check why in some cases minus needed here!!!
     # yi_minus_1 = np.clip(yi - 1, 0, ydim - 1)
     # yi = np.tile(np.repeat(np.column_stack([yi_minus_1, yi]), 2), (lenT))

     # X coordinates: [xi, xi+1, xi, xi+1] for each spatial point, repeated for time/depth
     xi_1 = np.clip(xi + 1, 0, xdim - 1)
-    xi = np.tile(np.column_stack([xi, xi_1, xi, xi_1]).flatten(), (lenT))
+    xi_full = np.tile(np.column_stack([xi, xi_1, xi, xi_1]).flatten(), (lenT))

     for data in [U, V]:
         axis_dim = grid.get_axis_dim_mapping(data.dims)

         # Create DataArrays for indexing
         selection_dict = {
-            axis_dim["X"]: xr.DataArray(xi, dims=("points")),
-            axis_dim["Y"]: xr.DataArray(yi, dims=("points")),
+            axis_dim["X"]: xr.DataArray(xi_full, dims=("points")),
+            axis_dim["Y"]: xr.DataArray(yi_full, dims=("points")),
         }
         if "Z" in axis_dim:
-            selection_dict[axis_dim["Z"]] = xr.DataArray(zi, dims=("points"))
+            selection_dict[axis_dim["Z"]] = xr.DataArray(zi_full, dims=("points"))
         if "time" in data.dims:
-            selection_dict["time"] = xr.DataArray(ti, dims=("points"))
+            selection_dict["time"] = xr.DataArray(ti_full, dims=("points"))

         corner_data = data.isel(selection_dict).data.reshape(lenT, len(xsi), 4)
@@ -271,7 +271,53 @@ def CGrid_Velocity(
     xx = (1 - xsi) * (1 - eta) * px[0] + xsi * (1 - eta) * px[1] + xsi * eta * px[2] + (1 - xsi) * eta * px[3]
     u = np.where(np.abs(xx - x) > 1e-4, np.nan, u)

-    return (u, v, 0)  # TODO fix and test W also
+    if vectorfield.W:
+        data = vectorfield.W.data
+        # Time coordinates: 2 points at ti, then 2 points at ti+1
+        if lenT == 1:
+            ti_full = np.repeat(ti, 2)
+        else:
+            ti_1 = np.clip(ti + 1, 0, tdim - 1)
+            ti_full = np.concatenate([np.repeat(ti, 2), np.repeat(ti_1, 2)])
+
+        # Depth coordinates: 2 points, at zi and zi+1, repeated for both time levels
+        zi_1 = np.clip(zi + 1, 0, zdim - 1)
+        zi_full = np.tile(np.array([zi, zi_1]).flatten(), lenT)
+
+        # Y coordinates: yi+1 for each spatial point, repeated for time/depth
+        yi_1 = np.clip(yi + 1, 0, ydim - 1)
+        yi_full = np.tile(yi_1, (lenT) * 2)
+
+        # X coordinates: xi+1 for each spatial point, repeated for time/depth
+        xi_1 = np.clip(xi + 1, 0, xdim - 1)
+        xi_full = np.tile(xi_1, (lenT) * 2)
+
+        axis_dim = grid.get_axis_dim_mapping(data.dims)
+
+        # Create DataArrays for indexing
+        selection_dict = {
+            axis_dim["X"]: xr.DataArray(xi_full, dims=("points")),
+            axis_dim["Y"]: xr.DataArray(yi_full, dims=("points")),
+            axis_dim["Z"]: xr.DataArray(zi_full, dims=("points")),
+        }
+        if "time" in data.dims:
+            selection_dict["time"] = xr.DataArray(ti_full, dims=("points"))
+
+        corner_data = 
data.isel(selection_dict).data.reshape(lenT, 2, len(xsi)) + + if lenT == 2: + tau_full = tau[np.newaxis, :] + corner_data = corner_data[0, :, :] * (1 - tau_full) + corner_data[1, :, :] * tau_full + else: + corner_data = corner_data[0, :, :] + + w = corner_data[0, :] * (1 - zeta) + corner_data[1, :] * zeta + if isinstance(w, dask.Array): + w = w.compute() + else: + w = np.zeros_like(u) + + return (u, v, w) def CGrid_Tracer( diff --git a/tests/v4/test_advection.py b/tests/v4/test_advection.py index 551206502..a350fd79f 100644 --- a/tests/v4/test_advection.py +++ b/tests/v4/test_advection.py @@ -20,6 +20,7 @@ from parcels.particleset import ParticleSet from parcels.tools.statuscodes import StatusCode from parcels.xgrid import XGrid +from tests.utils import round_and_hash_float_array kernel = { "EE": AdvectionEE, @@ -444,7 +445,8 @@ def periodicBC(particle, fieldSet, time): # pragma: no cover np.testing.assert_allclose(pset.lat_nextloop, latp, atol=1e-1) -def test_nemo_3D_curvilinear_fieldset(): +@pytest.mark.parametrize("method", ["RK4", "RK4_3D"]) +def test_nemo_3D_curvilinear_fieldset(method): download_dir = parcels.download_example_dataset("NemoNorthSeaORCA025-N006_data") ufiles = download_dir.glob("*U.nc") dsu = xr.open_mfdataset(ufiles, decode_times=False, drop_variables=["nav_lat", "nav_lon"]) @@ -466,10 +468,34 @@ def test_nemo_3D_curvilinear_fieldset(): coord_file = f"{download_dir}/coordinates.nc" dscoord = xr.open_dataset(coord_file, decode_times=False).rename({"glamf": "lon", "gphif": "lat"}) + dscoord = dscoord.isel(time=0, drop=True) ds = xr.merge([dsu, dsv, dsw, dscoord]) + ds = ds.drop_vars( + [ + "uos", + "vos", + "nav_lev", + "nav_lon", + "nav_lat", + "tauvo", + "tauuo", + "time_steps", + "gphiu", + "gphiv", + "gphit", + "glamu", + "glamv", + "glamt", + "time_centered_bounds", + "time_counter_bounds", + "time_centered", + ] + ) + ds = ds.drop_vars(["e1f", "e1t", "e1u", "e1v", "e2f", "e2t", "e2u", "e2v"]) + ds["time"] = [np.timedelta64(int(t), "s") + np.datetime64("1900-01-01") for t in ds["time"]] - ds.load() # TODO remove for debug + ds["W"] *= -1 # Invert W velocity xgcm_grid = parcels.xgcm.Grid( ds, @@ -488,14 +514,21 @@ def test_nemo_3D_curvilinear_fieldset(): W = parcels.Field("W", ds["W"], grid, mesh_type="spherical") U.units = parcels.GeographicPolar() V.units = parcels.Geographic() - W.units = parcels.Geographic() - UV = parcels.VectorField("UV", U, V, vector_interp_method=CGrid_Velocity) # TODO remove + UV = parcels.VectorField("UV", U, V, vector_interp_method=CGrid_Velocity) UVW = parcels.VectorField("UVW", U, V, W, vector_interp_method=CGrid_Velocity) fieldset = parcels.FieldSet([U, V, W, UV, UVW]) - lons = np.linspace(1.9, 3.4, 10) - lats = np.linspace(52.5, 51.6, 10) + npart = 10 + lons = np.linspace(1.9, 3.4, npart) + lats = np.linspace(52.5, 51.6, npart) pset = parcels.ParticleSet(fieldset, lon=lons, lat=lats, depth=np.ones_like(lons)) - pset.execute(parcels.AdvectionRK4, runtime=np.timedelta64(4, "D"), dt=np.timedelta64(6, "h")) - print(pset.depth) + pset.execute(kernel[method], runtime=np.timedelta64(4, "D"), dt=np.timedelta64(6, "h")) + + if method == "RK4": + np.testing.assert_equal(round_and_hash_float_array([p.lon for p in pset], decimals=5), 29977383852960156017546) + elif method == "RK4_3D": + # TODO check why decimals needs to be so low in RK4_3D (compare to v3) + np.testing.assert_equal( + round_and_hash_float_array([p.depth for p in pset], decimals=1), 29747210774230389239432 + ) From 1c948dd37e8a2f2ddb8962997e19d18e461fe193 Mon Sep 17 00:00:00 2001 
From: Erik van Sebille Date: Fri, 22 Aug 2025 10:29:08 +0200 Subject: [PATCH 16/29] Fixing stommel gyre CGrid interpolation test --- parcels/application_kernels/interpolation.py | 2 +- tests/v4/test_advection.py | 5 ++--- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/parcels/application_kernels/interpolation.py b/parcels/application_kernels/interpolation.py index da07cfd1b..5f412a528 100644 --- a/parcels/application_kernels/interpolation.py +++ b/parcels/application_kernels/interpolation.py @@ -269,7 +269,7 @@ def CGrid_Velocity( # check whether the grid conversion has been applied correctly xx = (1 - xsi) * (1 - eta) * px[0] + xsi * (1 - eta) * px[1] + xsi * eta * px[2] + (1 - xsi) * eta * px[3] - u = np.where(np.abs(xx - x) > 1e-4, np.nan, u) + u = np.where(np.abs((xx - x) / x) > 1e-4, np.nan, u) if vectorfield.W: data = vectorfield.W.data diff --git a/tests/v4/test_advection.py b/tests/v4/test_advection.py index a350fd79f..e7291ea64 100644 --- a/tests/v4/test_advection.py +++ b/tests/v4/test_advection.py @@ -13,7 +13,7 @@ ) from parcels.application_kernels.advection import AdvectionEE, AdvectionRK4, AdvectionRK4_3D, AdvectionRK45 from parcels.application_kernels.advectiondiffusion import AdvectionDiffusionEM, AdvectionDiffusionM1 -from parcels.application_kernels.interpolation import CGrid_Tracer, CGrid_Velocity, XLinear +from parcels.application_kernels.interpolation import CGrid_Velocity, XLinear from parcels.field import Field, VectorField from parcels.fieldset import FieldSet from parcels.particle import Particle, Variable @@ -342,10 +342,9 @@ def test_stommelgyre_fieldset(method, rtol, grid_type): ds = stommel_gyre_dataset(grid_type=grid_type) grid = XGrid.from_dataset(ds) vector_interp_method = None if grid_type == "A" else CGrid_Velocity - tracer_interp_method = XLinear if grid_type == "A" else CGrid_Tracer U = Field("U", ds["U"], grid) V = Field("V", ds["V"], grid) - P = Field("P", ds["P"], grid, interp_method=tracer_interp_method) + P = Field("P", ds["P"], grid, interp_method=XLinear) UV = VectorField("UV", U, V, vector_interp_method=vector_interp_method) fieldset = FieldSet([U, V, P, UV]) From df78b628f7eceeec0b93223013ac1db066386ccb Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Fri, 22 Aug 2025 10:47:41 +0200 Subject: [PATCH 17/29] Updating failing unit test --- tests/v4/test_advection.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/tests/v4/test_advection.py b/tests/v4/test_advection.py index e7291ea64..fba5354af 100644 --- a/tests/v4/test_advection.py +++ b/tests/v4/test_advection.py @@ -412,7 +412,11 @@ def test_nemo_curvilinear_fieldset(): data_folder = parcels.download_example_dataset("NemoCurvilinear_data") files = data_folder.glob("*.nc4") ds = xr.open_mfdataset(files, combine="nested", data_vars="minimal", coords="minimal", compat="override") - ds = ds.isel(time_counter=0, drop=True).drop_vars({"time"}).rename({"glamf": "lon", "gphif": "lat", "z": "depth"}) + ds = ( + ds.isel(time_counter=0, drop=True) + .isel(time=0, drop=True) + .rename({"glamf": "lon", "gphif": "lat", "z": "depth"}) + ) xgcm_grid = parcels.xgcm.Grid( ds, From cd691474fc0e4929ea4bfc19f24cffd88597859d Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Fri, 22 Aug 2025 10:59:29 +0200 Subject: [PATCH 18/29] Further fixing unit test by dropping unused dimensions --- tests/v4/test_advection.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/v4/test_advection.py b/tests/v4/test_advection.py index fba5354af..9a244e1cc 100644 --- 
a/tests/v4/test_advection.py +++ b/tests/v4/test_advection.py @@ -415,6 +415,7 @@ def test_nemo_curvilinear_fieldset(): ds = ( ds.isel(time_counter=0, drop=True) .isel(time=0, drop=True) + .isel(z_a=0, drop=True) .rename({"glamf": "lon", "gphif": "lat", "z": "depth"}) ) From 1a64033254b903bce8b9e529fe4a33f25bce8388 Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Fri, 22 Aug 2025 12:12:49 +0200 Subject: [PATCH 19/29] Temporary fix to spatialhash as suggested in https://github.com/OceanParcels/Parcels/pull/2153#issuecomment-3199768760 --- parcels/spatialhash.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/parcels/spatialhash.py b/parcels/spatialhash.py index 57942610e..78b582c87 100644 --- a/parcels/spatialhash.py +++ b/parcels/spatialhash.py @@ -138,7 +138,9 @@ def _initialize_face_hash_table(self): nface = (self._source_grid.xdim) * (self._source_grid.ydim) for eid in range(nface): for j in range(yi1[eid], yi2[eid] + 1): - if xi1[eid] <= xi2[eid]: + if abs(xi1[eid] - xi2[eid]) > 225: + pass + elif xi1[eid] <= xi2[eid]: # Normal case, no wrap for i in range(xi1[eid], xi2[eid] + 1): index_to_face[(i % self._nx) + self._nx * j].append(eid) From 7c1a87afda59a46bd64f0dc921e6623bcfc101a7 Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Fri, 22 Aug 2025 12:22:42 +0200 Subject: [PATCH 20/29] Adding TODO statement about spherical meshes --- parcels/spatialhash.py | 1 + 1 file changed, 1 insertion(+) diff --git a/parcels/spatialhash.py b/parcels/spatialhash.py index 78b582c87..3e30c3f07 100644 --- a/parcels/spatialhash.py +++ b/parcels/spatialhash.py @@ -139,6 +139,7 @@ def _initialize_face_hash_table(self): for eid in range(nface): for j in range(yi1[eid], yi2[eid] + 1): if abs(xi1[eid] - xi2[eid]) > 225: + # TODO make sure this is only called when mesh_type is spherical; but requires #2155 pass elif xi1[eid] <= xi2[eid]: # Normal case, no wrap From 07a8238b5da493752a6f6b8e1dc4a2c23770447a Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Fri, 22 Aug 2025 14:18:54 +0200 Subject: [PATCH 21/29] Updating spherical mesh hashmap creation Since #2155 is now merged --- parcels/spatialhash.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/parcels/spatialhash.py b/parcels/spatialhash.py index 3e30c3f07..c5a363d8b 100644 --- a/parcels/spatialhash.py +++ b/parcels/spatialhash.py @@ -138,8 +138,7 @@ def _initialize_face_hash_table(self): nface = (self._source_grid.xdim) * (self._source_grid.ydim) for eid in range(nface): for j in range(yi1[eid], yi2[eid] + 1): - if abs(xi1[eid] - xi2[eid]) > 225: - # TODO make sure this is only called when mesh_type is spherical; but requires #2155 + if self._source_grid.mesh == "spherical" and abs(xi1[eid] - xi2[eid]) > 225: pass elif xi1[eid] <= xi2[eid]: # Normal case, no wrap From 2924ee03d0ce892d947de294bc5bb6f20e1b2ed6 Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Mon, 25 Aug 2025 09:13:15 +0200 Subject: [PATCH 22/29] Fixing grid._mesh in interpolator --- parcels/application_kernels/interpolation.py | 14 +++++++------- parcels/spatialhash.py | 2 +- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/parcels/application_kernels/interpolation.py b/parcels/application_kernels/interpolation.py index 5ad3250d2..ce35f0a3c 100644 --- a/parcels/application_kernels/interpolation.py +++ b/parcels/application_kernels/interpolation.py @@ -148,22 +148,22 @@ def CGrid_Velocity( px = np.array([grid.lon[yi, xi], grid.lon[yi, xi + 1], grid.lon[yi + 1, xi + 1], grid.lon[yi + 1, xi]]) py = np.array([grid.lat[yi,
xi], grid.lat[yi, xi + 1], grid.lat[yi + 1, xi + 1], grid.lat[yi + 1, xi]]) - if vectorfield._mesh == "spherical": + if grid._mesh == "spherical": px[0] = np.where(px[0] < x - 225, px[0] + 360, px[0]) px[0] = np.where(px[0] > x + 225, px[0] - 360, px[0]) px[1:] = np.where(px[1:] - px[0] > 180, px[1:] - 360, px[1:]) px[1:] = np.where(-px[1:] + px[0] > 180, px[1:] + 360, px[1:]) c1 = i_u._geodetic_distance( - py[0], py[1], px[0], px[1], vectorfield._mesh, np.einsum("ij,ji->i", i_u.phi2D_lin(0.0, xsi), py) + py[0], py[1], px[0], px[1], grid._mesh, np.einsum("ij,ji->i", i_u.phi2D_lin(0.0, xsi), py) ) c2 = i_u._geodetic_distance( - py[1], py[2], px[1], px[2], vectorfield._mesh, np.einsum("ij,ji->i", i_u.phi2D_lin(eta, 1.0), py) + py[1], py[2], px[1], px[2], grid._mesh, np.einsum("ij,ji->i", i_u.phi2D_lin(eta, 1.0), py) ) c3 = i_u._geodetic_distance( - py[2], py[3], px[2], px[3], vectorfield._mesh, np.einsum("ij,ji->i", i_u.phi2D_lin(1.0, xsi), py) + py[2], py[3], px[2], px[3], grid._mesh, np.einsum("ij,ji->i", i_u.phi2D_lin(1.0, xsi), py) ) c4 = i_u._geodetic_distance( - py[3], py[0], px[3], px[0], vectorfield._mesh, np.einsum("ij,ji->i", i_u.phi2D_lin(eta, 0.0), py) + py[3], py[0], px[3], px[0], grid._mesh, np.einsum("ij,ji->i", i_u.phi2D_lin(eta, 0.0), py) ) lenT = 2 if np.any(tau > 0) else 1 @@ -245,9 +245,9 @@ def CGrid_Velocity( deg2m = 1852 * 60.0 if applyConversion: - meshJac = (deg2m * deg2m * np.cos(np.deg2rad(y))) if vectorfield._mesh == "spherical" else 1 + meshJac = (deg2m * deg2m * np.cos(np.deg2rad(y))) if grid._mesh == "spherical" else 1 else: - meshJac = deg2m if vectorfield._mesh == "spherical" else 1 + meshJac = deg2m if grid._mesh == "spherical" else 1 jac = i_u._compute_jacobian_determinant(py, px, eta, xsi) * meshJac diff --git a/parcels/spatialhash.py b/parcels/spatialhash.py index 1faa5d345..3ce163efe 100644 --- a/parcels/spatialhash.py +++ b/parcels/spatialhash.py @@ -138,7 +138,7 @@ def _initialize_face_hash_table(self): nface = (self._source_grid.xdim) * (self._source_grid.ydim) for eid in range(nface): for j in range(yi1[eid], yi2[eid] + 1): - if self._source_grid.mesh == "spherical" and abs(xi1[eid] - xi2[eid]) > 225: + if self._source_grid._mesh == "spherical" and abs(xi1[eid] - xi2[eid]) > 225: pass elif xi1[eid] <= xi2[eid]: # Normal case, no wrap From 1a17911b139945cfd5ae2b1edcdc91bb91b63336 Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Mon, 25 Aug 2025 11:31:38 +0200 Subject: [PATCH 23/29] Using is_dask_collection to check for dask in interpolation --- parcels/application_kernels/interpolation.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/parcels/application_kernels/interpolation.py b/parcels/application_kernels/interpolation.py index ce35f0a3c..2fb94e1e2 100644 --- a/parcels/application_kernels/interpolation.py +++ b/parcels/application_kernels/interpolation.py @@ -4,9 +4,9 @@ from typing import TYPE_CHECKING -import dask.array as dask import numpy as np import xarray as xr +from dask import is_dask_collection import parcels.tools.interpolation_utils as i_u @@ -113,7 +113,7 @@ def XLinear( + (1 - xsi) * eta * corner_data[:, 2] + xsi * eta * corner_data[:, 3] ) - return value.compute() if isinstance(value, dask.Array) else value + return value.compute() if is_dask_collection(value) else value def CGrid_Velocity( @@ -263,7 +263,7 @@ def CGrid_Velocity( + (eta * U + xsi * V) * py[2] + (-eta * U + (1 - xsi) * V) * py[3] ) / jac - if isinstance(u, dask.Array): + if is_dask_collection(u): u = u.compute() v = v.compute() @@ 
-312,7 +312,7 @@ def CGrid_Velocity( corner_data = corner_data[0, :, :] w = corner_data[0, :] * (1 - zeta) + corner_data[1, :] * zeta - if isinstance(w, dask.Array): + if is_dask_collection(w): w = w.compute() else: w = np.zeros_like(u) @@ -372,7 +372,7 @@ def CGrid_Tracer( else: value = value[0, :] - return value.compute() if isinstance(value, dask.Array) else value + return value.compute() if is_dask_collection(value) else value def UXPiecewiseConstantFace( From 5bdc874782a400627bb4b9cc03805c072c17a16d Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Thu, 28 Aug 2025 11:26:08 +0200 Subject: [PATCH 24/29] Fixing vector_interp_method --- parcels/application_kernels/interpolation.py | 15 +++++++++++++++ parcels/field.py | 13 ++++++++----- 2 files changed, 23 insertions(+), 5 deletions(-) diff --git a/parcels/application_kernels/interpolation.py b/parcels/application_kernels/interpolation.py index 2fb94e1e2..396843b07 100644 --- a/parcels/application_kernels/interpolation.py +++ b/parcels/application_kernels/interpolation.py @@ -22,6 +22,7 @@ "UXPiecewiseLinearNode", "XLinear", "ZeroInterpolator", + "ZeroInterpolator_Vector", ] @@ -39,6 +40,20 @@ def ZeroInterpolator( return 0.0 +def ZeroInterpolator_Vector( + vectorfield: VectorField, + ti: int, + position: dict[str, tuple[int, float | np.ndarray]], + tau: np.float32 | np.float64, + t: np.float32 | np.float64, + z: np.float32 | np.float64, + y: np.float32 | np.float64, + x: np.float32 | np.float64, +) -> np.float32 | np.float64: + """Template function used for the signature check of the interpolation methods for velocity fields.""" + return 0.0 + + def XLinear( field: Field, ti: int, diff --git a/parcels/field.py b/parcels/field.py index 58e4201c9..a91c62bbd 100644 --- a/parcels/field.py +++ b/parcels/field.py @@ -11,10 +11,13 @@ from parcels._core.utils.time import TimeInterval from parcels._reprs import default_repr -from parcels._typing import ( - VectorType, +from parcels._typing import VectorType +from parcels.application_kernels.interpolation import ( + UXPiecewiseLinearNode, + XLinear, + ZeroInterpolator, + ZeroInterpolator_Vector, ) -from parcels.application_kernels.interpolation import CGrid_Velocity, UXPiecewiseLinearNode, XLinear, ZeroInterpolator from parcels.particle import KernelParticle from parcels.tools.converters import ( UnitConverter, @@ -282,7 +285,7 @@ def __init__( if vector_interp_method is None: self._vector_interp_method = None else: - _assert_same_function_signature(vector_interp_method, ref=CGrid_Velocity) + _assert_same_function_signature(vector_interp_method, ref=ZeroInterpolator_Vector) self._vector_interp_method = vector_interp_method if U.grid._mesh != V.grid._mesh or (W and U.grid._mesh != W.grid._mesh): @@ -301,7 +304,7 @@ def vector_interp_method(self): @vector_interp_method.setter def vector_interp_method(self, method: Callable): - _assert_same_function_signature(method, ref=CGrid_Velocity) + _assert_same_function_signature(method, ref=ZeroInterpolator_Vector) self._vector_interp_method = method def eval(self, time: datetime, z, y, x, particle=None, applyConversion=True): From 6659a1d3e53e0fc3d51f6b2e69083a0e99ebdf55 Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Mon, 1 Sep 2025 12:11:06 +0200 Subject: [PATCH 25/29] fixing merging bugs --- parcels/_index_search.py | 27 ++++++++------------ parcels/application_kernels/interpolation.py | 1 + 2 files changed, 11 insertions(+), 17 deletions(-) diff --git a/parcels/_index_search.py b/parcels/_index_search.py index 77e433e45..4ca2bd72f 100644 --- 
a/parcels/_index_search.py +++ b/parcels/_index_search.py @@ -5,6 +5,7 @@ import numpy as np +from parcels._typing import Mesh from parcels.tools.statuscodes import ( _raise_field_out_of_bound_error, _raise_field_sampling_error, @@ -110,21 +111,13 @@ def _search_indices_curvilinear_2d( return (yi, eta, xi, xsi) -def _reconnect_bnd_indices(yi: int, xi: int, ydim: int, xdim: int, sphere_mesh: bool): - if xi < 0: - if sphere_mesh: - xi = xdim - 2 - else: - xi = 0 - if xi > xdim - 2: - if sphere_mesh: - xi = 0 - else: - xi = xdim - 2 - if yi < 0: - yi = 0 - if yi > ydim - 2: - yi = ydim - 2 - if sphere_mesh: - xi = xdim - xi +def _reconnect_bnd_indices(yi: int, xi: int, ydim: int, xdim: int, mesh: Mesh): + xi = np.where(xi < 0, (xdim - 2) if mesh == "spherical" else 0, xi) + xi = np.where(xi > xdim - 2, 0 if mesh == "spherical" else (xdim - 2), xi) + + xi = np.where(yi > ydim - 2, xdim - xi if mesh == "spherical" else xi, xi) + + yi = np.where(yi < 0, 0, yi) + yi = np.where(yi > ydim - 2, ydim - 2, yi) + return yi, xi diff --git a/parcels/application_kernels/interpolation.py b/parcels/application_kernels/interpolation.py index 396843b07..a1816dbeb 100644 --- a/parcels/application_kernels/interpolation.py +++ b/parcels/application_kernels/interpolation.py @@ -49,6 +49,7 @@ def ZeroInterpolator_Vector( z: np.float32 | np.float64, y: np.float32 | np.float64, x: np.float32 | np.float64, + applyConversion: bool, ) -> np.float32 | np.float64: """Template function used for the signature check of the interpolation methods for velocity fields.""" return 0.0 From 2568feb08be8af1e288169e8223ab27a8d909223 Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Mon, 1 Sep 2025 14:52:31 +0200 Subject: [PATCH 26/29] Using is_dask_collection for c-grid interpolator --- parcels/application_kernels/interpolation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/parcels/application_kernels/interpolation.py b/parcels/application_kernels/interpolation.py index f0aa10f0e..467d4280d 100644 --- a/parcels/application_kernels/interpolation.py +++ b/parcels/application_kernels/interpolation.py @@ -452,7 +452,7 @@ def XNearest( else: value = corner_data[0, :] - return value.compute() if isinstance(value, dask.Array) else value + return value.compute() if is_dask_collection(value) else value def UXPiecewiseConstantFace( From 525ec71c6a9cc60a795e428eb9a6c58bc05e1933 Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Wed, 3 Sep 2025 13:55:36 +0200 Subject: [PATCH 27/29] Merge branch 'v4-dev' into c-grid-interpolation --- docs/v4/index.md | 2 +- parcels/_index_search.py | 13 ++- parcels/field.py | 15 ++- parcels/kernel.py | 23 +++-- parcels/particleset.py | 123 ++++++++++++----------- parcels/tools/statuscodes.py | 55 ++++++++--- tests/v4/test_index_search.py | 36 ++++++- tests/v4/test_particleset.py | 47 --------- tests/v4/test_particleset_execute.py | 141 +++++++++++++++++++++++++-- v3to4-breaking-changes.md | 23 +++-- 10 files changed, 323 insertions(+), 155 deletions(-) diff --git a/docs/v4/index.md b/docs/v4/index.md index 94831395b..d3f8157a2 100644 --- a/docs/v4/index.md +++ b/docs/v4/index.md @@ -7,7 +7,7 @@ The key goals of this update are 1. to support `Fields` on unstructured grids; 2. to allow for user-defined interpolation methods (somewhat similar to user-defined kernels); 3. to make the codebase more modular, easier to extend, and more maintainable; -4. 
to align Parcels more with other tools in the [Pangeo ecosystemand](https://www.pangeo.io/#ecosystem), particularly by leveraging `xarray` more; and +4. to align Parcels more with other tools in the [Pangeo ecosystem](https://www.pangeo.io/#ecosystem), particularly by leveraging `xarray` more; and 5. to improve the performance of Parcels. The timeline for the release of Parcels v4 is not yet fixed, but we are aiming for a release of an 'alpha' version in September 2025. This v4-alpha will have support for unstructured grids and user-defined interpolation methods, but is not yet performance-optimised. diff --git a/parcels/_index_search.py b/parcels/_index_search.py index 4ca2bd72f..6c7f8a26c 100644 --- a/parcels/_index_search.py +++ b/parcels/_index_search.py @@ -7,8 +7,7 @@ from parcels._typing import Mesh from parcels.tools.statuscodes import ( - _raise_field_out_of_bound_error, - _raise_field_sampling_error, + _raise_grid_searching_error, _raise_time_extrapolation_error, ) @@ -65,12 +64,12 @@ def _search_indices_curvilinear_2d( # TODO: Re-enable in some capacity # if x < field.lonlat_minmax[0] or x > field.lonlat_minmax[1]: # if grid.lon[0, 0] < grid.lon[0, -1]: - # _raise_field_out_of_bound_error(y, x) + # _raise_grid_searching_error(y, x) # elif x < grid.lon[0, 0] and x > grid.lon[0, -1]: # This prevents from crashing in [160, -160] - # _raise_field_out_of_bound_error(z, y, x) + # _raise_grid_searching_error(z, y, x) # if y < field.lonlat_minmax[2] or y > field.lonlat_minmax[3]: - # _raise_field_out_of_bound_error(z, y, x) + # _raise_grid_searching_error(z, y, x) while np.any(xsi < -tol) or np.any(xsi > 1 + tol) or np.any(eta < -tol) or np.any(eta > 1 + tol): px = np.array([grid.lon[yi, xi], grid.lon[yi, xi + 1], grid.lon[yi + 1, xi + 1], grid.lon[yi + 1, xi]]) @@ -102,12 +101,12 @@ def _search_indices_curvilinear_2d( it += 1 if it > maxIterSearch: print(f"Correct cell not found after {maxIterSearch} iterations") - _raise_field_out_of_bound_error(0, y, x) + _raise_grid_searching_error(0, y, x) xsi = np.where(xsi < 0.0, 0.0, np.where(xsi > 1.0, 1.0, xsi)) eta = np.where(eta < 0.0, 0.0, np.where(eta > 1.0, 1.0, eta)) if np.any((xsi < 0) | (xsi > 1) | (eta < 0) | (eta > 1)): - _raise_field_sampling_error(y, x) + _raise_grid_searching_error(y, x) return (yi, eta, xi, xsi) diff --git a/parcels/field.py b/parcels/field.py index a91c62bbd..33d405e79 100644 --- a/parcels/field.py +++ b/parcels/field.py @@ -242,6 +242,8 @@ def eval(self, time: datetime, z, y, x, particle=None, applyConversion=True): value = self._interp_method(self, ti, position, tau, time, z, y, x) + _update_particle_states_interp_value(particle, value) + if applyConversion: value = self.units.to_target(value, z, y, x) return value @@ -328,6 +330,8 @@ def eval(self, time: datetime, z, y, x, particle=None, applyConversion=True): v = self.V._interp_method(self.V, ti, position, tau, time, z, y, x) if "3D" in self.vector_type: w = self.W._interp_method(self.W, ti, position, tau, time, z, y, x) + else: + w = 0.0 if applyConversion: u = self.U.units.to_target(u, z, y, x) @@ -336,7 +340,8 @@ def eval(self, time: datetime, z, y, x, particle=None, applyConversion=True): else: (u, v, w) = self._vector_interp_method(self, ti, position, tau, time, z, y, x, applyConversion) - _update_particle_states_velocity(particle, {"U": u}) + for vel in (u, v, w): + _update_particle_states_interp_value(particle, vel) if applyConversion and ("3D" in self.vector_type): w = self.W.units.to_target(w, z, y, x) if self.W else 0.0 @@ -375,11 +380,11 @@ def 
_update_particle_states_position(particle, position): ) -def _update_particle_states_velocity(particle, velocities): - """Update the particle states based on the velocity dictionary, but only if state is not an Error already.""" - if particle and "U" in velocities: +def _update_particle_states_interp_value(particle, value): + """Update the particle states based on the interpolated value, but only if state is not an Error already.""" + if particle: particle.state = np.maximum( - np.where(np.isnan(velocities["U"]), StatusCode.Error, particle.state), particle.state + np.where(np.isnan(value), StatusCode.ErrorInterpolation, particle.state), particle.state ) diff --git a/parcels/kernel.py b/parcels/kernel.py index 467724ee8..e4c42238f 100644 --- a/parcels/kernel.py +++ b/parcels/kernel.py @@ -15,9 +15,11 @@ from parcels.basegrid import GridType from parcels.tools.statuscodes import ( StatusCode, + _raise_field_interpolation_error, _raise_field_out_of_bound_error, _raise_field_out_of_bound_surface_error, - _raise_field_sampling_error, + _raise_general_error, + _raise_grid_searching_error, _raise_time_extrapolation_error, ) from parcels.tools.warnings import KernelWarning @@ -28,6 +30,16 @@ __all__ = ["Kernel"] +ErrorsToThrow = { + StatusCode.ErrorTimeExtrapolation: _raise_time_extrapolation_error, + StatusCode.ErrorOutOfBounds: _raise_field_out_of_bound_error, + StatusCode.ErrorThroughSurface: _raise_field_out_of_bound_surface_error, + StatusCode.ErrorInterpolation: _raise_field_interpolation_error, + StatusCode.ErrorGridSearching: _raise_grid_searching_error, + StatusCode.Error: _raise_general_error, +} + + class Kernel: """Kernel object that encapsulates auto-generated code. @@ -270,14 +282,7 @@ def execute(self, pset, endtime, dt): if np.any(pset.state == StatusCode.StopAllExecution): return StatusCode.StopAllExecution - errors_to_throw = { - StatusCode.ErrorTimeExtrapolation: _raise_time_extrapolation_error, - StatusCode.ErrorOutOfBounds: _raise_field_out_of_bound_error, - StatusCode.ErrorThroughSurface: _raise_field_out_of_bound_surface_error, - StatusCode.Error: _raise_field_sampling_error, - } - - for error_code, error_func in errors_to_throw.items(): + for error_code, error_func in ErrorsToThrow.items(): if np.any(pset.state == error_code): inds = pset.state == error_code if error_code == StatusCode.ErrorTimeExtrapolation: diff --git a/parcels/particleset.py b/parcels/particleset.py index 828f85c5e..f67b6b34e 100644 --- a/parcels/particleset.py +++ b/parcels/particleset.py @@ -1,6 +1,7 @@ import sys import warnings from collections.abc import Iterable +from typing import Literal import numpy as np import xarray as xr @@ -506,59 +507,17 @@ def execute( if output_file: output_file.metadata["parcels_kernels"] = self._kernel.name - if (dt is not None) and (not isinstance(dt, np.timedelta64)): - raise TypeError("dt must be a np.timedelta64 object") - if dt is None or np.isnat(dt): + if dt is None: dt = np.timedelta64(1, "s") - self._data["dt"][:] = dt - sign_dt = np.sign(dt).astype(int) - if sign_dt not in [-1, 1]: - raise ValueError("dt must be a positive or negative np.timedelta64 object") - if self.fieldset.time_interval is None: - start_time = np.timedelta64(0, "s") # For the execution loop, we need a start time as a timedelta object - if runtime is None: - raise TypeError("The runtime must be provided when the time_interval is not defined for a fieldset.") + if not isinstance(dt, np.timedelta64) or np.isnat(dt) or (sign_dt := np.sign(dt).astype(int)) not in [-1, 1]: + raise 
ValueError(f"dt must be a positive or negative np.timedelta64 object, got {dt=!r}") - else: - if isinstance(runtime, np.timedelta64): - end_time = runtime - else: - raise TypeError("The runtime must be a np.timedelta64 object") + self._data["dt"][:] = dt - else: - if not np.isnat(self.time_nextloop).any(): - if sign_dt > 0: - start_time = self.time_nextloop.min() - else: - start_time = self.time_nextloop.max() - else: - if sign_dt > 0: - start_time = self.fieldset.time_interval.left - else: - start_time = self.fieldset.time_interval.right - - if runtime is None: - if endtime is None: - raise ValueError( - "Must provide either runtime or endtime when time_interval is defined for a fieldset." - ) - # Ensure that the endtime uses the same type as the start_time - if isinstance(endtime, self.fieldset.time_interval.left.__class__): - if sign_dt > 0: - if endtime < self.fieldset.time_interval.left: - raise ValueError("The endtime must be after the start time of the fieldset.time_interval") - end_time = min(endtime, self.fieldset.time_interval.right) - else: - if endtime > self.fieldset.time_interval.right: - raise ValueError( - "The endtime must be before the end time of the fieldset.time_interval when dt < 0" - ) - end_time = max(endtime, self.fieldset.time_interval.left) - else: - raise TypeError("The endtime must be of the same type as the fieldset.time_interval start time.") - else: - end_time = start_time + runtime * sign_dt + start_time, end_time = _get_simulation_start_and_end_times( + self.fieldset.time_interval, self._data["time_nextloop"], runtime, endtime, sign_dt + ) # Set the time of the particles if it hadn't been set on initialisation if np.isnat(self._data["time"]).any(): @@ -619,15 +578,69 @@ def _warn_particle_times_outside_fieldset_time_bounds(release_times: np.ndarray, if isinstance(time.left, np.datetime64) and isinstance(release_times[0], np.timedelta64): release_times = np.array([t + time.left for t in release_times]) - if np.any(release_times < time.left): + if np.any(release_times < time.left) or np.any(release_times > time.right): warnings.warn( "Some particles are set to be released outside the FieldSet's executable time domain.", ParticleSetWarning, stacklevel=2, ) - if np.any(release_times > time.right): - warnings.warn( - "Some particles are set to be released after the fieldset's last time and the fields are not constant in time.", - ParticleSetWarning, - stacklevel=2, + + +def _get_simulation_start_and_end_times( + time_interval: TimeInterval, + particle_release_times: np.ndarray, + runtime: np.timedelta64 | None, + endtime: np.datetime64 | None, + sign_dt: Literal[-1, 1], +) -> tuple[np.datetime64, np.datetime64]: + if runtime is not None and endtime is not None: + raise ValueError( + f"runtime and endtime are mutually exclusive - provide one or the other. Got {runtime=!r}, {endtime=!r}" ) + + if runtime is None and time_interval is None: + raise ValueError("The runtime must be provided when the time_interval is not defined for a fieldset.") + + if sign_dt == 1: + first_release_time = particle_release_times.min() + else: + first_release_time = particle_release_times.max() + + start_time = _get_start_time(first_release_time, time_interval, sign_dt, runtime) + + if endtime is None: + if not isinstance(runtime, np.timedelta64): + raise ValueError(f"The runtime must be a np.timedelta64 object. 
Got {type(runtime)}") + + endtime = start_time + sign_dt * runtime + + if time_interval is not None: + if type(endtime) != type(time_interval.left): # noqa: E721 + raise ValueError( + f"The endtime must be of the same type as the fieldset.time_interval start time. Got {endtime=!r} with {time_interval=!r}" + ) + if endtime not in time_interval: + msg = ( + f"Calculated/provided end time of {endtime!r} is not in fieldset time interval {time_interval!r}. Either reduce your runtime, modify your " + "provided endtime, or change your release timing." + "Important info:\n" + f" First particle release: {first_release_time!r}\n" + f" runtime: {runtime!r}\n" + f" (calculated) endtime: {endtime!r}" + ) + raise ValueError(msg) + + return start_time, endtime + + +def _get_start_time(first_release_time, time_interval, sign_dt, runtime): + if time_interval is None: + time_interval = TimeInterval(left=np.timedelta64(0, "s"), right=runtime) + + if sign_dt == 1: + fieldset_start = time_interval.left + else: + fieldset_start = time_interval.right + + start_time = first_release_time if not np.isnat(first_release_time) else fieldset_start + return start_time diff --git a/parcels/tools/statuscodes.py b/parcels/tools/statuscodes.py index 94589b8d8..aa84b3ef4 100644 --- a/parcels/tools/statuscodes.py +++ b/parcels/tools/statuscodes.py @@ -7,9 +7,11 @@ "KernelError", "StatusCode", "TimeExtrapolationError", + "_raise_field_interpolation_error", "_raise_field_out_of_bound_error", "_raise_field_out_of_bound_surface_error", - "_raise_field_sampling_error", + "_raise_general_error", + "_raise_grid_searching_error", "_raise_time_extrapolation_error", ] @@ -25,37 +27,38 @@ class StatusCode: StopAllExecution = 41 Error = 50 ErrorInterpolation = 51 + ErrorGridSearching = 52 ErrorOutOfBounds = 60 ErrorThroughSurface = 61 ErrorTimeExtrapolation = 70 -class FieldSamplingError(RuntimeError): - """Utility error class to propagate erroneous field sampling.""" +class FieldInterpolationError(RuntimeError): + """Utility error class to propagate NaN field interpolation.""" pass +def _raise_field_interpolation_error(z, y, x): + raise FieldInterpolationError(f"Field interpolation returned NaN at (depth={z}, lat={y}, lon={x})") + + class FieldOutOfBoundError(RuntimeError): """Utility error class to propagate out-of-bound field sampling.""" pass +def _raise_field_out_of_bound_error(z, y, x): + raise FieldOutOfBoundError(f"Field sampled out-of-bound, at (depth={z}, lat={y}, lon={x})") + + class FieldOutOfBoundSurfaceError(RuntimeError): """Utility error class to propagate out-of-bound field sampling at the surface.""" pass -def _raise_field_sampling_error(z, y, x): - raise FieldSamplingError(f"Field sampled at (depth={z}, lat={y}, lon={x})") - - -def _raise_field_out_of_bound_error(z, y, x): - raise FieldOutOfBoundError(f"Field sampled out-of-bound, at (depth={z}, lat={y}, lon={x})") - - def _raise_field_out_of_bound_surface_error(z: float | None, y: float | None, x: float | None) -> None: def format_out(val): return "unknown" if val is None else val @@ -65,6 +68,32 @@ def format_out(val): ) +class FieldSamplingError(RuntimeError): + """Utility error class to propagate field sampling errors.""" + + pass + + +class GridSearchingError(RuntimeError): + """Utility error class to propagate grid searching errors.""" + + pass + + +def _raise_grid_searching_error(z, y, x): + raise GridSearchingError(f"Grid searching failed at (depth={z}, lat={y}, lon={x})") + + +class GeneralError(RuntimeError): + """Utility error class to propagate general 
errors.""" + + pass + + +def _raise_general_error(z, y, x): + raise GeneralError(f"General error occurred at (depth={z}, lat={y}, lon={x})") + + class TimeExtrapolationError(RuntimeError): """Utility error class to propagate erroneous time extrapolation sampling.""" @@ -91,9 +120,11 @@ def __init__(self, particle, fieldset=None, msg=None): AllParcelsErrorCodes = { - FieldSamplingError: StatusCode.Error, + FieldInterpolationError: StatusCode.ErrorInterpolation, FieldOutOfBoundError: StatusCode.ErrorOutOfBounds, FieldOutOfBoundSurfaceError: StatusCode.ErrorThroughSurface, + GridSearchingError: StatusCode.ErrorGridSearching, TimeExtrapolationError: StatusCode.ErrorTimeExtrapolation, KernelError: StatusCode.Error, + GeneralError: StatusCode.Error, } diff --git a/tests/v4/test_index_search.py b/tests/v4/test_index_search.py index 5c1a47307..5606913fd 100644 --- a/tests/v4/test_index_search.py +++ b/tests/v4/test_index_search.py @@ -1,12 +1,13 @@ import numpy as np import pytest +import xarray as xr from parcels._datasets.structured.generic import datasets from parcels._index_search import _search_indices_curvilinear_2d from parcels.field import Field -from parcels.xgrid import ( - XGrid, -) +from parcels.tools.exampledata_utils import download_example_dataset +from parcels.xgcm import Grid +from parcels.xgrid import XGrid @pytest.fixture @@ -51,3 +52,32 @@ def test_grid_indexing_fpoints(field_cone): ] assert x > np.min(cell_lon) and x < np.max(cell_lon) assert y > np.min(cell_lat) and y < np.max(cell_lat) + + +def test_indexing_nemo_curvilinear(): + data_folder = download_example_dataset("NemoCurvilinear_data") + ds = xr.open_mfdataset( + data_folder.glob("*.nc4"), combine="nested", data_vars="minimal", coords="minimal", compat="override" + ) + ds = ds.isel({"time_counter": 0, "time": 0, "z_a": 0}, drop=True).rename( + {"glamf": "lon", "gphif": "lat", "z": "depth"} + ) + xgcm_grid = Grid(ds, coords={"X": {"left": "x"}, "Y": {"left": "y"}}, periodic=False) + grid = XGrid(xgcm_grid) + + # Test points on the NEMO 1/4 degree curvilinear grid + lats = np.array([-30, 0, 88]) + lons = np.array([30, 60, -150]) + + yi, eta, xi, xsi = _search_indices_curvilinear_2d(grid, lats, lons) + + # Construct cornerpoints px + px = np.array([grid.lon[yi, xi], grid.lon[yi, xi + 1], grid.lon[yi + 1, xi + 1], grid.lon[yi + 1, xi]]) + + # Maximum 5 degree difference between px values + for i in range(lons.shape[0]): + np.testing.assert_allclose(px[1, i], px[:, i], atol=5) + + # Reconstruct lons values from cornerpoints + xx = (1 - xsi) * (1 - eta) * px[0] + xsi * (1 - eta) * px[1] + xsi * eta * px[2] + (1 - xsi) * eta * px[3] + np.testing.assert_allclose(xx, lons, atol=1e-6) diff --git a/tests/v4/test_particleset.py b/tests/v4/test_particleset.py index 7c3f2058c..f3f2c48fa 100644 --- a/tests/v4/test_particleset.py +++ b/tests/v4/test_particleset.py @@ -114,21 +114,6 @@ def test_pset_create_outside_time(fieldset): ParticleSet(fieldset, pclass=Particle, lon=[0] * len(time), lat=[0] * len(time), time=time) -@pytest.mark.parametrize( - "dt, expectation", - [ - (np.timedelta64(5, "s"), does_not_raise()), - (5.0, pytest.raises(TypeError)), - (np.datetime64("2000-01-02T00:00:00"), pytest.raises(TypeError)), - (timedelta(seconds=2), pytest.raises(TypeError)), - ], -) -def test_particleset_dt_type(fieldset, dt, expectation): - pset = ParticleSet(fieldset, lon=[0.2], lat=[5.0], depth=[50.0], pclass=Particle) - with expectation: - pset.execute(runtime=np.timedelta64(10, "s"), dt=dt, pyfunc=DoNothing) - - def 
test_pset_starttime_not_multiple_dt(fieldset): times = [0, 1, 2] datetimes = [fieldset.time_interval.left + np.timedelta64(t, "s") for t in times] @@ -141,38 +126,6 @@ def Addlon(particle, fieldset, time): # pragma: no cover assert np.allclose([p.lon_nextloop for p in pset], [8 - t for t in times]) -@pytest.mark.parametrize( - "runtime, expectation", - [ - (np.timedelta64(5, "s"), does_not_raise()), - (5.0, pytest.raises(TypeError)), - (timedelta(seconds=2), pytest.raises(TypeError)), - (np.datetime64("2001-01-02T00:00:00"), pytest.raises(TypeError)), - (datetime(2000, 1, 2, 0, 0, 0), pytest.raises(TypeError)), - ], -) -def test_particleset_runtime_type(fieldset, runtime, expectation): - pset = ParticleSet(fieldset, lon=[0.2], lat=[5.0], depth=[50.0], pclass=Particle) - with expectation: - pset.execute(runtime=runtime, dt=np.timedelta64(10, "s"), pyfunc=DoNothing) - - -@pytest.mark.parametrize( - "endtime, expectation", - [ - (np.datetime64("2000-01-02T00:00:00"), does_not_raise()), - (5.0, pytest.raises(TypeError)), - (np.timedelta64(5, "s"), pytest.raises(TypeError)), - (timedelta(seconds=2), pytest.raises(TypeError)), - (datetime(2000, 1, 2, 0, 0, 0), pytest.raises(TypeError)), - ], -) -def test_particleset_endtime_type(fieldset, endtime, expectation): - pset = ParticleSet(fieldset, lon=[0.2], lat=[5.0], depth=[50.0], pclass=Particle) - with expectation: - pset.execute(endtime=endtime, dt=np.timedelta64(10, "m"), pyfunc=DoNothing) - - def test_pset_add_explicit(fieldset): npart = 11 lon = np.linspace(0, 1, npart) diff --git a/tests/v4/test_particleset_execute.py b/tests/v4/test_particleset_execute.py index 68a699626..0c6a4baf6 100644 --- a/tests/v4/test_particleset_execute.py +++ b/tests/v4/test_particleset_execute.py @@ -1,3 +1,6 @@ +from contextlib import nullcontext as does_not_raise +from datetime import datetime, timedelta + import numpy as np import pytest @@ -14,7 +17,7 @@ from parcels._datasets.structured.generated import simple_UV_dataset from parcels._datasets.structured.generic import datasets as datasets_structured from parcels._datasets.unstructured.generic import datasets as datasets_unstructured -from parcels.tools.statuscodes import FieldOutOfBoundError, TimeExtrapolationError +from parcels.tools.statuscodes import FieldInterpolationError, FieldOutOfBoundError, TimeExtrapolationError from parcels.uxgrid import UxGrid from parcels.xgrid import XGrid from tests import utils @@ -27,7 +30,20 @@ def fieldset() -> FieldSet: grid = XGrid.from_dataset(ds, mesh="flat") U = Field("U", ds["U (A grid)"], grid) V = Field("V", ds["V (A grid)"], grid) - return FieldSet([U, V]) + UV = VectorField("UV", U, V) + return FieldSet([U, V, UV]) + + +@pytest.fixture +def fieldset_no_time_interval() -> FieldSet: + # i.e., no time variation + ds = datasets_structured["ds_2d_left"].isel(time=0).drop("time") + + grid = XGrid.from_dataset(ds, mesh="flat") + U = Field("U", ds["U (A grid)"], grid) + V = Field("V", ds["V (A grid)"], grid) + UV = VectorField("UV", U, V) + return FieldSet([U, V, UV]) @pytest.fixture @@ -41,6 +57,98 @@ def zonal_flow_fieldset() -> FieldSet: return FieldSet([U, V, UV]) +def test_pset_execute_implicit_dt_one_second(fieldset): + pset = ParticleSet(fieldset, lon=[0.2], lat=[5.0], pclass=Particle) + pset.execute(DoNothing, runtime=np.timedelta64(1, "s")) + + time = pset.time.copy() + + pset.execute(DoNothing, runtime=np.timedelta64(1, "s")) + np.testing.assert_array_equal(pset.time, time + np.timedelta64(1, "s")) + + +def test_pset_execute_invalid_arguments(fieldset, 
fieldset_no_time_interval): + for dt in [1, np.timedelta64(0, "s"), np.timedelta64(None)]: + with pytest.raises( + ValueError, + match="dt must be a positive or negative np.timedelta64 object, got .*", + ): + ParticleSet(fieldset, lon=[0.2], lat=[5.0], pclass=Particle).execute(dt=dt) + + with pytest.raises( + ValueError, + match="runtime and endtime are mutually exclusive - provide one or the other. Got .*", + ): + ParticleSet(fieldset, lon=[0.2], lat=[5.0], pclass=Particle).execute( + runtime=np.timedelta64(1, "s"), endtime=np.datetime64("2100-01-01") + ) + + with pytest.raises( + ValueError, + match="The runtime must be a np.timedelta64 object. Got .*", + ): + ParticleSet(fieldset, lon=[0.2], lat=[5.0], pclass=Particle).execute(runtime=1) + + msg = """Calculated/provided end time of .* is not in fieldset time interval .* Either reduce your runtime, modify your provided endtime, or change your release timing.*""" + with pytest.raises( + ValueError, + match=msg, + ): + ParticleSet(fieldset, lon=[0.2], lat=[5.0], pclass=Particle).execute(endtime=np.datetime64("1990-01-01")) + + with pytest.raises( + ValueError, + match=msg, + ): + ParticleSet(fieldset, lon=[0.2], lat=[5.0], pclass=Particle).execute( + endtime=np.datetime64("2100-01-01"), dt=np.timedelta64(-1, "s") + ) + + with pytest.raises( + ValueError, + match="The endtime must be of the same type as the fieldset.time_interval start time. Got .*", + ): + ParticleSet(fieldset, lon=[0.2], lat=[5.0], pclass=Particle).execute(endtime=12345) + + with pytest.raises( + ValueError, + match="The runtime must be provided when the time_interval is not defined for a fieldset.", + ): + ParticleSet(fieldset_no_time_interval, lon=[0.2], lat=[5.0], pclass=Particle).execute() + + +@pytest.mark.parametrize( + "runtime, expectation", + [ + (np.timedelta64(5, "s"), does_not_raise()), + (5.0, pytest.raises(ValueError)), + (timedelta(seconds=2), pytest.raises(ValueError)), + (np.datetime64("2001-01-02T00:00:00"), pytest.raises(ValueError)), + (datetime(2000, 1, 2, 0, 0, 0), pytest.raises(ValueError)), + ], +) +def test_particleset_runtime_type(fieldset, runtime, expectation): + pset = ParticleSet(fieldset, lon=[0.2], lat=[5.0], depth=[50.0], pclass=Particle) + with expectation: + pset.execute(runtime=runtime, dt=np.timedelta64(10, "s"), pyfunc=DoNothing) + + +@pytest.mark.parametrize( + "endtime, expectation", + [ + (np.datetime64("2000-01-02T00:00:00"), does_not_raise()), + (5.0, pytest.raises(ValueError)), + (np.timedelta64(5, "s"), pytest.raises(ValueError)), + (timedelta(seconds=2), pytest.raises(ValueError)), + (datetime(2000, 1, 2, 0, 0, 0), pytest.raises(ValueError)), + ], +) +def test_particleset_endtime_type(fieldset, endtime, expectation): + pset = ParticleSet(fieldset, lon=[0.2], lat=[5.0], depth=[50.0], pclass=Particle) + with expectation: + pset.execute(endtime=endtime, dt=np.timedelta64(10, "m"), pyfunc=DoNothing) + + def test_pset_remove_particle_in_kernel(fieldset): npart = 100 pset = ParticleSet(fieldset, lon=np.linspace(0, 1, npart), lat=np.linspace(1, 0, npart)) @@ -92,7 +200,8 @@ def AddLat(particle, fieldset, time): # pragma: no cover def test_execution_endtime(fieldset, starttime, endtime, dt): starttime = fieldset.time_interval.left + np.timedelta64(starttime, "s") endtime = fieldset.time_interval.left + np.timedelta64(endtime, "s") - dt = np.timedelta64(dt, "s") + if dt is not None: + dt = np.timedelta64(dt, "s") pset = ParticleSet(fieldset, time=starttime, lon=0, lat=0) pset.execute(DoNothing, endtime=endtime, dt=dt) assert 
abs(pset.time_nextloop - endtime) < np.timedelta64(1, "ms") @@ -152,10 +261,29 @@ def test_some_particles_throw_outoftime(fieldset): pset = ParticleSet(fieldset, lon=np.zeros_like(time), lat=np.zeros_like(time), time=time) def FieldAccessOutsideTime(particle, fieldset, time): # pragma: no cover - fieldset.U[particle.time + np.timedelta64(1, "D"), particle.depth, particle.lat, particle.lon, particle] + fieldset.U[particle.time + np.timedelta64(400, "D"), particle.depth, particle.lat, particle.lon, particle] with pytest.raises(TimeExtrapolationError): - pset.execute(FieldAccessOutsideTime, runtime=np.timedelta64(400, "D"), dt=np.timedelta64(10, "D")) + pset.execute(FieldAccessOutsideTime, runtime=np.timedelta64(1, "D"), dt=np.timedelta64(10, "D")) + + +def test_raise_grid_searching_error(): ... + + +def test_raise_general_error(): ... + + +def test_errorinterpolation(fieldset): + def NaNInterpolator(field, ti, position, tau, t, z, y, x): # pragma: no cover + return np.nan * np.zeros_like(x) + + def SampleU(particle, fieldset, time): # pragma: no cover + fieldset.U[particle.time, particle.depth, particle.lat, particle.lon, particle] + + fieldset.U.interp_method = NaNInterpolator + pset = ParticleSet(fieldset, lon=[0, 2], lat=[0, 0]) + with pytest.raises(FieldInterpolationError): + pset.execute(SampleU, runtime=np.timedelta64(2, "s"), dt=np.timedelta64(1, "s")) def test_execution_check_stopallexecution(fieldset): @@ -200,7 +328,8 @@ def test_execution_runtime(fieldset, starttime, runtime, dt, npart): starttime = fieldset.time_interval.left + np.timedelta64(starttime, "s") runtime = np.timedelta64(runtime, "s") sign_dt = 1 if dt is None else np.sign(dt) - dt = np.timedelta64(dt, "s") + if dt is not None: + dt = np.timedelta64(dt, "s") pset = ParticleSet(fieldset, time=starttime, lon=np.zeros(npart), lat=np.zeros(npart)) pset.execute(DoNothing, runtime=runtime, dt=dt) assert all([abs(p.time_nextloop - starttime - runtime * sign_dt) < np.timedelta64(1, "ms") for p in pset]) diff --git a/v3to4-breaking-changes.md b/v3to4-breaking-changes.md index 14fe961f6..3afdb3e05 100644 --- a/v3to4-breaking-changes.md +++ b/v3to4-breaking-changes.md @@ -1,15 +1,18 @@ -Kernels: +## Kernels: -- `particle.delete()` is no longer valid. Have to do `particle.state = StatusCode.Delete` -- Sharing state between kernels must be done via the particle data (as now the kernels are not combined under the hood). -- dt is a np.timedelta64 object +- The Kernel loop has been 'vectorized', so that the input of a Kernel is not one particle anymore, but a list of particles. This means that `if`-statements in Kernels don't work anymore. Replace `if`-statements with `numpy.where` statements. +- `particle.delete()` is no longer valid. Instead, use `particle.state = StatusCode.Delete`. +- Sharing state between kernels must be done via the particle data (as the kernels are not combined under the hood anymore). +- `particle_dlon`, `particle_dlat` etc. have been renamed to `particle.dlon` and `particle.dlat`. +- `particle.dt` is a np.timedelta64 object; be careful when multiplying `particle.dt` with a velocity, as its value may be cast to nanoseconds. +- The `time` argument in the Kernel signature is now always `None` (and may be removed in the Kernel API before release of v4), so it can't be used. Use `particle.time` instead. -FieldSet +## FieldSet -- `interp_method` has to be an Interpolation function, instead of a string +- `interp_method` has to be an Interpolation function, instead of a string. 
-ParticleSet + +## ParticleSet -- ParticleSet init had `repeatdt` and `lonlatdepth_dtype` removed -- ParticleSet.execute() expects `numpy.datetime64`/`numpy.timedelta.64` for `runtime`, `endtime` and `dt` -- `ParticleSet.from_field()`, `ParticleSet.from_line()`, `ParticleSet.from_list()` has been removed +- `repeatdt` and `lonlatdepth_dtype` have been removed from the ParticleSet. +- ParticleSet.execute() expects `numpy.datetime64`/`numpy.timedelta64` for `runtime`, `endtime` and `dt`. +- `ParticleSet.from_field()`, `ParticleSet.from_line()`, `ParticleSet.from_list()` have been removed. From 0ac7ae03373a58014024ed1cbe778b28993b313f Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Tue, 9 Sep 2025 08:26:32 +0200 Subject: [PATCH 28/29] Removing mesh types check for VectorField --- parcels/field.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/parcels/field.py b/parcels/field.py index 33d405e79..b248414a4 100644 --- a/parcels/field.py +++ b/parcels/field.py @@ -290,9 +290,6 @@ def __init__( _assert_same_function_signature(vector_interp_method, ref=ZeroInterpolator_Vector) self._vector_interp_method = vector_interp_method - if U.grid._mesh != V.grid._mesh or (W and U.grid._mesh != W.grid._mesh): - raise ValueError(f"Inconsistent mesh types: {U.grid._mesh}, {V.grid._mesh}, {W.grid._mesh}") - def __repr__(self): return f"""<{type(self).__name__}> name: {self.name!r} From 872b75c87f35e7f64446199820c011dea6438f2e Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Tue, 9 Sep 2025 10:52:26 +0200 Subject: [PATCH 29/29] Setting lon and lat as coordinates --- parcels/xgrid.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/parcels/xgrid.py b/parcels/xgrid.py index d22a7e0e7..ac6dd89e2 100644 --- a/parcels/xgrid.py +++ b/parcels/xgrid.py @@ -101,10 +101,12 @@ def __init__(self, grid: xgcm.Grid, mesh="flat"): self._mesh = mesh self._spatialhash = None ds = grid._ds - if "lon" in ds and hasattr(ds["lon"], "load"): - ds["lon"].load() - if "lat" in ds and hasattr(ds["lat"], "load"): - ds["lat"].load() + + # Set the coordinates for the dataset (needs to be done explicitly for curvilinear grids; set_coords returns a new Dataset) + if "lon" in ds: + ds = ds.set_coords("lon") + if "lat" in ds: + ds = ds.set_coords("lat") if len(set(grid.axes) & {"X", "Y", "Z"}) > 0: # Only if spatial grid is >0D (see #2054 for further development) assert_valid_lat_lon(ds["lat"], ds["lon"], grid.axes)
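Note on the set_coords change in the last patch: xarray's Dataset.set_coords returns a new Dataset rather than modifying the dataset in place, which is why the result must be assigned back to ds. A minimal standalone sketch (synthetic 2x2 curvilinear coordinates, not the Parcels test data):

import numpy as np
import xarray as xr

# Build a toy dataset with 2D lon/lat as plain data variables
ds = xr.Dataset({"lon": (("y", "x"), np.zeros((2, 2))), "lat": (("y", "x"), np.zeros((2, 2)))})
ds = ds.set_coords(["lon", "lat"])  # must be assigned back; calling without assignment is a no-op
assert "lon" in ds.coords and "lat" in ds.coords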