From 8edac525a7851e90bb5af38c73ab893d6e47478f Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Mon, 15 Dec 2025 08:03:57 +0100 Subject: [PATCH 1/4] First draft of nesting guide --- docs/examples/documentation_nesting.ipynb | 383 ++++++++++++++++++++++ pixi.toml | 1 + 2 files changed, 384 insertions(+) create mode 100644 docs/examples/documentation_nesting.ipynb diff --git a/docs/examples/documentation_nesting.ipynb b/docs/examples/documentation_nesting.ipynb new file mode 100644 index 000000000..0710f7ef3 --- /dev/null +++ b/docs/examples/documentation_nesting.ipynb @@ -0,0 +1,383 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "0", + "metadata": {}, + "source": [ + "# Nested Grids documentation" + ] + }, + { + "cell_type": "markdown", + "id": "1", + "metadata": {}, + "source": [ + "This how-to guide explains how to use nested grids in Parcels v4, using the new `uxarray` integration. We will demonstrate how to set up a simulation with multiple nested grids, and how to handle particle transitions between these grids.\n", + "\n", + "We wil base this on the LOCATE benchmark dataset ([Hernandez et al 2024](https://gmd.copernicus.org/articles/17/2221/2024/)), which contains a regional grid, a nested coastal grid and a nested harbour grid for the Barcelona region." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "2", + "metadata": {}, + "outputs": [], + "source": [ + "import matplotlib.pyplot as plt\n", + "import matplotlib.tri as mtri\n", + "import numpy as np\n", + "import uxarray as ux\n", + "import xarray as xr\n", + "from shapely.geometry import MultiPoint, Point, Polygon\n", + "from triangle import triangulate\n", + "\n", + "import parcels\n", + "\n", + "data_dir = \"/Users/erik/Desktop/Parcelsv4_test/Parcels_Benchmarks_Nested_LOCATE\"" + ] + }, + { + "cell_type": "markdown", + "id": "3", + "metadata": {}, + "source": [ + "## Setting up the individual nest domains\n", + "We first load the three grids using `xarray`. Since the variable names differ between the regional and nested grids, we rename the temperature variable in the regional grid for consistency." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4", + "metadata": {}, + "outputs": [], + "source": [ + "nests = [\"TAR/harbour\", \"TAR/coastal\", \"regional\"]\n", + "ds_in = {}\n", + "for nest in nests:\n", + " ds_in[nest] = xr.open_mfdataset(f\"{data_dir}/{nest}/*.nc\")\n", + "\n", + "ds_in[\"regional\"] = ds_in[\"regional\"].rename({\"thetao\": \"temperature\"})" + ] + }, + { + "cell_type": "markdown", + "id": "5", + "metadata": {}, + "source": [ + "The two nested grids are rectangular, but rotated with respect to the regional grid. We will first identify the corners of the rectangles by finding the minimum-area rotated bounding box around the grid points using the `find_rotated_rectangle()` function. Of course, in a real-world application you would typically have the polygon coordinates available from the grid generation process so this step would not be necessary." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6", + "metadata": {}, + "outputs": [], + "source": [ + "def find_rotated_rectangle(da):\n", + " \"\"\"\n", + " Return the 4 corner coordinates (lon, lat) of the minimum-area rotated rectangle\n", + " that encloses the finite values in an xarray DataArray.\n", + " \"\"\"\n", + " i, j = np.nonzero(np.asarray(np.isfinite(da)))\n", + " pts = np.column_stack((da.longitude.values[j], da.latitude.values[i]))\n", + "\n", + " rect = MultiPoint(pts).convex_hull.minimum_rotated_rectangle\n", + " coords = np.array(rect.exterior.coords)[:-1] # last point repeats the first\n", + " return coords" + ] + }, + { + "cell_type": "markdown", + "id": "7", + "metadata": {}, + "source": [ + "We plot the three grids and overlay the identified rectangles to verify that they correctly capture the extent of the nested grids." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8", + "metadata": {}, + "outputs": [], + "source": [ + "n_fieldsets = len(ds_in)\n", + "fig, axes = plt.subplots(1, n_fieldsets, figsize=(5 * n_fieldsets, 4))\n", + "\n", + "rectangle = {}\n", + "for ax, (name, ds) in zip(axes, ds_in.items()):\n", + " da = ds.temperature.isel(time=0)\n", + " da.plot(ax=ax)\n", + " rect = find_rotated_rectangle(da)\n", + " rectangle[name] = rect\n", + " ax.plot(\n", + " np.append(rect[:, 0], rect[0, 0]), np.append(rect[:, 1], rect[0, 1]), \"-r\", lw=2\n", + " )\n", + " ax.set_title(name)\n", + "\n", + "plt.tight_layout()\n", + "plt.show()" + ] + }, + { + "cell_type": "markdown", + "id": "9", + "metadata": {}, + "source": [ + "## Creating a Delaunay triangulation of the nests\n", + "\n", + "Now comes the important part: we need to create a Delaunay triangulation of the three nests, so that we can efficiently determine in which nest a particle is located at any given time. We use the `triangle` package to perform the triangulation, and `shapely` to handle the geometric operations. \n", + "\n", + "Note that we need to keep the edges of the rectangles in the triangulation, so we need a [constrained (PSLG) Delaunay triangulation](https://en.wikipedia.org/wiki/Constrained_Delaunay_triangulation).\n", + "\n", + "The result is a set of triangles covering the three nests, which we can use to determine in which nest a particle is located at any given time. It is important that the list of polygons is ordered so that the smallest nest is first, so that triangles in overlapping areas are assigned to the correct nest." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "10", + "metadata": {}, + "outputs": [], + "source": [ + "def constrained_triangulate_keep_edges(polygons):\n", + " \"\"\"\n", + " Build a PSLG from the polygon boundaries, run constrained Delaunay (triangle)\n", + " so original polygon edges are preserved, then assign triangles to polygons\n", + " by centroid containment.\n", + "\n", + " Args:\n", + " polygons: list of (Ni,2) numpy arrays (polygon boundary points in order)\n", + "\n", + " Returns:\n", + " pts: (P x 2) vertices returned by triangle\n", + " tris: (n_face x 3) array of triangle vertex indices (into pts)\n", + " face_poly: (n_face,) array mapping each triangle -> polygon index (or -1 if outside)\n", + " \"\"\"\n", + " # flatten vertices and create segments so polygon edges are constrained\n", + " verts = []\n", + " segments = []\n", + " offset = 0\n", + " for poly in polygons:\n", + " Ni = len(poly)\n", + " verts.extend(poly.tolist())\n", + " segments.extend([[offset + j, offset + ((j + 1) % Ni)] for j in range(Ni)])\n", + " offset += Ni\n", + " verts = np.asarray(verts, dtype=float)\n", + " segments = np.asarray(segments, dtype=int)\n", + "\n", + " mode = \"p\" # \"p\" = PSLG (constrained triangulation)\n", + " B = triangulate({\"vertices\": verts, \"segments\": segments}, mode)\n", + "\n", + " pts = B[\"vertices\"]\n", + " tris = B[\"triangles\"].astype(int)\n", + "\n", + " # assign triangles to polygons using centroid test\n", + " shapely_polys = [Polygon(p) for p in polygons]\n", + " centers = pts[tris].mean(axis=1)\n", + " face_poly = np.full(len(tris), -1, dtype=int)\n", + " for ti, c in enumerate(centers):\n", + " for ip in range(len(shapely_polys)):\n", + " if shapely_polys[ip].contains(Point(c)):\n", + " face_poly[ti] = ip\n", + " break\n", + "\n", + " return pts, tris, face_poly" + ] + }, + { + "cell_type": "markdown", + "id": "11", + "metadata": {}, + "source": [ + "We can then run the triangulation and plot the resulting triangles to verify that they correctly cover the three nests." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "12", + "metadata": {}, + "outputs": [], + "source": [ + "polygons = [r for r in rectangle.values()]\n", + "\n", + "points, face_tris, face_poly = constrained_triangulate_keep_edges(polygons)\n", + "\n", + "triangles_by_poly = {\n", + " i: face_tris[face_poly == i]\n", + " if np.any(face_poly == i)\n", + " else np.empty((0, 3), dtype=int)\n", + " for i in range(len(polygons))\n", + "}\n", + "\n", + "fig, ax = plt.subplots()\n", + "for i, tris in triangles_by_poly.items():\n", + " if tris.size:\n", + " ax.triplot(points[:, 0], points[:, 1], tris, label=f\"Nest {i}\")\n", + "ax.scatter(points[:, 0], points[:, 1], s=10, c=\"k\")\n", + "ax.set_aspect(\"equal\")\n", + "ax.legend()\n", + "plt.show()" + ] + }, + { + "cell_type": "markdown", + "id": "13", + "metadata": {}, + "source": [ + "Then, we convert the triangulation into a Parcels FieldSet using `Parcels.UxGrid()`." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "14", + "metadata": {}, + "outputs": [], + "source": [ + "# build an xarray dataset compatible with UGRID / uxarray\n", + "n_node = points.shape[0]\n", + "n_face = face_tris.shape[0]\n", + "n_max_face_nodes = face_tris.shape[1]\n", + "\n", + "ds_tri = xr.Dataset(\n", + " {\n", + " \"node_lon\": ((\"n_node\",), points[:, 0]),\n", + " \"node_lat\": ((\"n_node\",), points[:, 1]),\n", + " \"face_node_connectivity\": ((\"n_face\", \"n_max_face_nodes\"), face_tris),\n", + " \"face_polygon\": (\n", + " (\n", + " \"time\",\n", + " \"nz\",\n", + " \"n_face\",\n", + " ),\n", + " face_poly[np.newaxis, np.newaxis, :],\n", + " {\n", + " \"long_name\": \"Nest id\",\n", + " \"description\": \"0=regional, 1=coastal\",\n", + " \"location\": \"face\",\n", + " \"mesh\": \"delaunay\",\n", + " },\n", + " ),\n", + " },\n", + " coords={\n", + " \"time\": np.array([np.timedelta64(0, \"ns\")]),\n", + " \"nz\": np.array([0]),\n", + " \"n_node\": np.arange(n_node),\n", + " \"n_face\": np.arange(n_face),\n", + " },\n", + " attrs={\"Conventions\": \"UGRID-1.0\"},\n", + ")\n", + "\n", + "uxda = ux.UxDataArray(ds_tri[\"face_polygon\"], uxgrid=ux.Grid(ds_tri))\n", + "\n", + "NestID = parcels.Field(\n", + " \"NestID\",\n", + " uxda,\n", + " parcels.UxGrid(uxda.uxgrid, z=uxda[\"nz\"], mesh=\"spherical\"),\n", + " interp_method=parcels.interpolators.UxPiecewiseConstantFace,\n", + ")\n", + "fieldset = parcels.FieldSet([NestID])" + ] + }, + { + "cell_type": "markdown", + "id": "15", + "metadata": {}, + "source": [ + "We can confirm that the FieldSet has been created correctly by running a Parcels simulation where particles sample the `NestID` field, which indicates in which nest each particle is located at any given time." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "16", + "metadata": {}, + "outputs": [], + "source": [ + "X, Y = np.meshgrid(\n", + " np.linspace(0.3, 2.9, 50),\n", + " np.linspace(40.25, 41.7, 40),\n", + ")\n", + "\n", + "NestParticle = parcels.Particle.add_variable(parcels.Variable(\"nestID\", dtype=np.int32))\n", + "pset = parcels.ParticleSet(\n", + " fieldset, pclass=NestParticle, lon=X.flatten(), lat=Y.flatten()\n", + ")\n", + "\n", + "\n", + "def SampleNestID(particles, fieldset):\n", + " particles.nestID = fieldset.NestID[particles]\n", + "\n", + "\n", + "pset.execute(SampleNestID, runtime=np.timedelta64(1, \"s\"), dt=np.timedelta64(1, \"s\"))" + ] + }, + { + "cell_type": "markdown", + "id": "17", + "metadata": {}, + "source": [ + "Indeed, the visualisation below shows that particles correctly identify the nest they are in based on their location." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "18", + "metadata": {}, + "outputs": [], + "source": [ + "fig, ax = plt.subplots(1, 1, figsize=(10, 5))\n", + "\n", + "triang = mtri.Triangulation(\n", + " uxda.uxgrid.node_lon.values,\n", + " uxda.uxgrid.node_lat.values,\n", + " triangles=uxda.uxgrid.face_node_connectivity.values,\n", + ")\n", + "\n", + "plot_args = {\n", + " \"cmap\": \"viridis\",\n", + " \"edgecolors\": \"k\",\n", + " \"linewidth\": 0.5,\n", + " \"vmin\": 0,\n", + " \"vmax\": 2.0,\n", + "}\n", + "ax.tripcolor(\n", + " triang, facecolors=np.squeeze(uxda[0, :].values), shading=\"flat\", **plot_args\n", + ")\n", + "ax.scatter(pset.lon, pset.lat, c=pset.nestID, **plot_args)\n", + "ax.set_aspect(\"equal\")\n", + "ax.set_title(\"Nesting visualisation (triangulation and interpolated particle values)\")\n", + "plt.tight_layout()\n", + "plt.show()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "test-latest", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.0" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/pixi.toml b/pixi.toml index 5933e5905..80c2e5c17 100644 --- a/pixi.toml +++ b/pixi.toml @@ -39,6 +39,7 @@ xgcm = ">=0.9.0" cf_xarray = ">=0.8.6" cftime = ">=1.6.3" pooch = ">=1.8.0" +py-triangle = ">=20250106,<20250107" [dependencies] parcels = { path = "." } From 28e42540f7d46574e0ebcc85d71ef453828e1dda Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Mon, 15 Dec 2025 08:06:15 +0100 Subject: [PATCH 2/4] Remove unit test on NestedField --- tests-v3/test_fieldset_sampling.py | 77 ------------------------------ 1 file changed, 77 deletions(-) diff --git a/tests-v3/test_fieldset_sampling.py b/tests-v3/test_fieldset_sampling.py index 11dec1988..1a3c68623 100644 --- a/tests-v3/test_fieldset_sampling.py +++ b/tests-v3/test_fieldset_sampling.py @@ -14,7 +14,6 @@ Geographic, Particle, ParticleSet, - StatusCode, Variable, ) from tests.utils import create_fieldset_global @@ -796,82 +795,6 @@ def test_multiple_grid_addlater_error(): assert fail -@pytest.mark.v4alpha -@pytest.mark.xfail(reason="Implementation of NestedFields is being reconsidered in v4.") -def test_nestedfields(): - from parcels import NestedField - - xdim = 10 - ydim = 20 - - U1 = Field( - "U1", - 0.1 * np.ones((ydim, xdim), dtype=np.float32), - lon=np.linspace(0.0, 1.0, xdim, dtype=np.float32), - lat=np.linspace(0.0, 1.0, ydim, dtype=np.float32), - ) - V1 = Field( - "V1", - 0.2 * np.ones((ydim, xdim), dtype=np.float32), - lon=np.linspace(0.0, 1.0, xdim, dtype=np.float32), - lat=np.linspace(0.0, 1.0, ydim, dtype=np.float32), - ) - U2 = Field( - "U2", - 0.3 * np.ones((ydim, xdim), dtype=np.float32), - lon=np.linspace(0.0, 2.0, xdim, dtype=np.float32), - lat=np.linspace(0.0, 2.0, ydim, dtype=np.float32), - ) - V2 = Field( - "V2", - 0.4 * np.ones((ydim, xdim), dtype=np.float32), - lon=np.linspace(0.0, 2.0, xdim, dtype=np.float32), - lat=np.linspace(0.0, 2.0, ydim, dtype=np.float32), - ) - U = NestedField("U", [U1, U2]) - V = NestedField("V", [V1, V2]) - fieldset = FieldSet(U, V) - - P1 = Field( - "P1", - 0.1 * np.ones((ydim, xdim), dtype=np.float32), - lon=np.linspace(0.0, 1.0, xdim, dtype=np.float32), - lat=np.linspace(0.0, 1.0, ydim, dtype=np.float32), - ) - P2 = Field( - "P2", - 0.2 * np.ones((ydim, xdim), 
dtype=np.float32), - lon=np.linspace(0.0, 2.0, xdim, dtype=np.float32), - lat=np.linspace(0.0, 2.0, ydim, dtype=np.float32), - ) - P = NestedField("P", [P1, P2]) - fieldset.add_field(P) - - def Recover(particle, fieldset, time): # pragma: no cover - if particle.state == StatusCode.ErrorOutOfBounds: - particle.dlon = 0 - particle.dlat = 0 - particle.ddepth = 0 - particle.lon = 0 - particle.lat = 0 - particle.p = 999 - particle.state = StatusCode.Evaluate - - pset = ParticleSet(fieldset, pclass=pclass(), lon=[0], lat=[0.3]) - pset.execute(AdvectionRK4 + pset.Kernel(SampleP), runtime=2, dt=1) - assert np.isclose(pset.lat[0], 0.5) - assert np.isclose(pset.p[0], 0.1) - pset = ParticleSet(fieldset, pclass=pclass(), lon=[0], lat=[1.1]) - pset.execute(AdvectionRK4 + pset.Kernel(SampleP), runtime=2, dt=1) - assert np.isclose(pset.lat[0], 1.5) - assert np.isclose(pset.p[0], 0.2) - pset = ParticleSet(fieldset, pclass=pclass(), lon=[0], lat=[2.3]) - pset.execute(pset.Kernel(AdvectionRK4) + SampleP + Recover, runtime=1, dt=1) - assert np.isclose(pset.lat[0], 0) - assert np.isclose(pset.p[0], 999) - assert np.allclose(fieldset.UV[0][0, 0, 0, 0], [0.1, 0.2]) - - def test_fieldset_sampling_updating_order(tmp_zarrfile): def calc_p(t, y, x): return 10 * t + x + 0.2 * y From bf6e4c649625935fd5b1b37d5a7da1f0d4f887d7 Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Mon, 15 Dec 2025 08:58:00 +0100 Subject: [PATCH 3/4] Adding Particle Advection to nesting tutorial --- docs/examples/documentation_nesting.ipynb | 108 ++++++++++++++++++++++ 1 file changed, 108 insertions(+) diff --git a/docs/examples/documentation_nesting.ipynb b/docs/examples/documentation_nesting.ipynb index 0710f7ef3..81e03462d 100644 --- a/docs/examples/documentation_nesting.ipynb +++ b/docs/examples/documentation_nesting.ipynb @@ -357,6 +357,114 @@ "plt.tight_layout()\n", "plt.show()" ] + }, + { + "cell_type": "markdown", + "id": "19", + "metadata": {}, + "source": [ + "## Advecting particles with nest transitions\n", + "\n", + "We can now set up a particle advection simulation using the nested grids. We first combine all the Fields into a single FieldSet. \n", + "\n", + "We rename the individual Fields by appending the nest index to their names, so that we can easily identify which Field belongs to which nest. We also add the `NestID` Field to the FieldSet (note that Parcels v4 supports combining structured and unstructured Fields into one FieldSet, which is very convenient for this usecase)." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "20", + "metadata": {}, + "outputs": [], + "source": [ + "fields = [NestID]\n", + "for i, ds in enumerate(ds_in.values()):\n", + " # TODO : remove depth dimension when Parcels supports 2D copernicusmarine datasets\n", + " ds = ds.assign_coords(depth=np.array([0]))\n", + " ds[\"depth\"].attrs[\"axis\"] = \"Z\"\n", + "\n", + " fset = parcels.FieldSet.from_copernicusmarine(ds)\n", + " for fld in fset.fields.values():\n", + " fld.name = f\"{fld.name}{i}\"\n", + " fields.append(fld)\n", + "fieldset = parcels.FieldSet(fields)" + ] + }, + { + "cell_type": "markdown", + "id": "21", + "metadata": {}, + "source": [ + "We then define a custom Advection kernel that advects particles using the appropriate velocity Field based on the `NestID` at the particle's location. Note that for simplicity, we use Eulerian advection here, but in a real-world application you would typically use a higher-order scheme." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "22", + "metadata": {}, + "outputs": [], + "source": [ + "def AdvectEE_Nests(particles, fieldset):\n", + " particles.nestID = fieldset.NestID[particles]\n", + "\n", + " # TODO because of KernelParticle bug (GH #2143), we need to copy lon/lat/time to local variables\n", + " time = particles.time\n", + " z = particles.z\n", + " lat = particles.lat\n", + " lon = particles.lon\n", + " u = np.zeros_like(particles.lon)\n", + " v = np.zeros_like(particles.lat)\n", + "\n", + " unique_ids = np.unique(particles.nestID)\n", + " for nid in unique_ids:\n", + " mask = particles.nestID == nid\n", + " UVField = getattr(fieldset, f\"UV{nid}\")\n", + " (u[mask], v[mask]) = UVField[time[mask], z[mask], lat[mask], lon[mask]]\n", + "\n", + " particles.dlon += u * particles.dt\n", + " particles.dlat += v * particles.dt\n", + "\n", + "\n", + "pset = parcels.ParticleSet(\n", + " fieldset, pclass=NestParticle, lon=X.flatten(), lat=Y.flatten()\n", + ")\n", + "ofile = parcels.ParticleFile(\"nest_particles.zarr\", outputdt=np.timedelta64(1, \"h\"))\n", + "pset.execute(\n", + " AdvectEE_Nests,\n", + " runtime=np.timedelta64(24, \"h\"),\n", + " dt=np.timedelta64(1, \"h\"),\n", + " output_file=ofile,\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "23", + "metadata": {}, + "source": [ + "And finally we plot the particles moving through the nested grids, confirming that they correctly transition between the nests based on their location." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "24", + "metadata": {}, + "outputs": [], + "source": [ + "fig, ax = plt.subplots(1, 1, figsize=(10, 5))\n", + "\n", + "ds_out = xr.open_zarr(\"nest_particles.zarr\")\n", + "\n", + "cmap = plt.get_cmap(\"viridis\", 3)\n", + "sc = ax.scatter(ds_out.lon, ds_out.lat, c=ds_out.nestID, s=4, cmap=cmap, vmin=0, vmax=2)\n", + "\n", + "cbar = plt.colorbar(sc, ticks=[0, 1, 2], ax=ax)\n", + "cbar.set_label(\"Nest ID\")\n", + "ax.set_title(\"Particle advection through nests\")\n", + "plt.show()" + ] } ], "metadata": { From 94e8658166b5db6e93b7e1297d9217f5ea30a9b3 Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Mon, 15 Dec 2025 10:27:46 +0100 Subject: [PATCH 4/4] Fixing ErrorParticles and improving final plot --- docs/examples/documentation_nesting.ipynb | 36 ++++++++++++++++++++--- 1 file changed, 32 insertions(+), 4 deletions(-) diff --git a/docs/examples/documentation_nesting.ipynb b/docs/examples/documentation_nesting.ipynb index 81e03462d..09f70115f 100644 --- a/docs/examples/documentation_nesting.ipynb +++ b/docs/examples/documentation_nesting.ipynb @@ -259,7 +259,7 @@ " face_poly[np.newaxis, np.newaxis, :],\n", " {\n", " \"long_name\": \"Nest id\",\n", - " \"description\": \"0=regional, 1=coastal\",\n", + " \"description\": \"2=regional, 1=coastal, 0=harbour\",\n", " \"location\": \"face\",\n", " \"mesh\": \"delaunay\",\n", " },\n", @@ -425,14 +425,31 @@ " particles.dlon += u * particles.dt\n", " particles.dlat += v * particles.dt\n", "\n", + " # TODO particle states have to be updated manually because UVField is not called with `particles` argument (becaise of GH #2143)\n", + " particles.state = np.where(\n", + " np.isnan(u) | np.isnan(v),\n", + " parcels.StatusCode.ErrorInterpolation,\n", + " particles.state,\n", + " )\n", + "\n", + "\n", + "def DeleteErrorParticles(particles, fieldset):\n", + " any_error = particles.state >= 50 # This captures all Errors\n", + " particles[any_error].state = parcels.StatusCode.Delete\n", + "\n", + "\n", 
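+    "# The release locations below override the domain-wide grid defined earlier in this notebook;\n",
+    "# this small patch is assumed to lie within the innermost (harbour) nest, so that particles\n",
+    "# cross nest boundaries as they disperse during the multi-day run.\n",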
+    "X, Y = np.meshgrid(\n",
+    "    np.linspace(1.22, 1.26, 5),\n",
+    "    np.linspace(41.02, 41.08, 4),\n",
+    ")\n",
     "\n",
     "pset = parcels.ParticleSet(\n",
     "    fieldset, pclass=NestParticle, lon=X.flatten(), lat=Y.flatten()\n",
     ")\n",
     "ofile = parcels.ParticleFile(\"nest_particles.zarr\", outputdt=np.timedelta64(1, \"h\"))\n",
     "pset.execute(\n",
-    "    AdvectEE_Nests,\n",
-    "    runtime=np.timedelta64(24, \"h\"),\n",
+    "    [AdvectEE_Nests, DeleteErrorParticles],\n",
+    "    runtime=np.timedelta64(5, \"D\") - np.timedelta64(1, \"h\"),\n",
     "    dt=np.timedelta64(1, \"h\"),\n",
     "    output_file=ofile,\n",
     ")"
    ]
   },
@@ -453,16 +470,27 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "fig, ax = plt.subplots(1, 1, figsize=(10, 5))\n",
+    "fig, ax = plt.subplots(1, 1, figsize=(10, 3))\n",
     "\n",
     "ds_out = xr.open_zarr(\"nest_particles.zarr\")\n",
     "\n",
     "cmap = plt.get_cmap(\"viridis\", 3)\n",
+    "plt.plot(ds_out.lon.T, ds_out.lat.T, \"k\", linewidth=0.5)\n",
     "sc = ax.scatter(ds_out.lon, ds_out.lat, c=ds_out.nestID, s=4, cmap=cmap, vmin=0, vmax=2)\n",
+    "xl, yl = ax.get_xlim(), ax.get_ylim()\n",
+    "\n",
+    "for rect in rectangle.values():\n",
+    "    ax.plot(\n",
+    "        np.append(rect[:, 0], rect[0, 0]), np.append(rect[:, 1], rect[0, 1]), \"-r\", lw=1\n",
+    "    )\n",
+    "ax.set_xlim(xl)\n",
+    "ax.set_ylim(yl)\n",
+    "ax.set_aspect(\"equal\")\n",
     "\n",
     "cbar = plt.colorbar(sc, ticks=[0, 1, 2], ax=ax)\n",
     "cbar.set_label(\"Nest ID\")\n",
     "ax.set_title(\"Particle advection through nests\")\n",
+    "plt.tight_layout()\n",
     "plt.show()"
    ]
   }