From a2d71bce4f5044daca56443f8ef06e504a9e4a0d Mon Sep 17 00:00:00 2001
From: Erik van Sebille
Date: Thu, 17 Jul 2025 08:03:08 +0200
Subject: [PATCH 01/11] Fixing bug where conversion was not applied in field.eval

---
 parcels/field.py | 8 ++------
 1 file changed, 2 insertions(+), 6 deletions(-)

diff --git a/parcels/field.py b/parcels/field.py
index dba61dbd6..68d6be88e 100644
--- a/parcels/field.py
+++ b/parcels/field.py
@@ -262,17 +262,14 @@ def eval(self, time: datetime, z, y, x, particle=None, applyConversion=True):
             if np.isnan(value):
                 # Detect Out-of-bounds sampling and raise exception
                 _raise_field_out_of_bound_error(z, y, x)
-            else:
-                return value
         except (FieldSamplingError, FieldOutOfBoundError, FieldOutOfBoundSurfaceError) as e:
             e.add_note(f"Error interpolating field '{self.name}'.")
             raise e
 
         if applyConversion:
-            return self.units.to_target(value, z, y, x)
-        else:
-            return value
+            value = self.units.to_target(value, z, y, x)
+        return value
 
     def __getitem__(self, key):
         self._check_velocitysampling()
@@ -355,7 +352,6 @@ def eval(self, time: datetime, z, y, x, particle=None, applyConversion=True):
         else:
             (u, v, w) = self._vector_interp_method(self, ti, position, time, z, y, x)
 
-        # print(u,v)
         if applyConversion:
             u = self.U.units.to_target(u, z, y, x)
             v = self.V.units.to_target(v, z, y, x)

From d1a2e79ca223e0a499b6ef61225bc7e7329e2381 Mon Sep 17 00:00:00 2001
From: Erik van Sebille
Date: Thu, 17 Jul 2025 08:03:32 +0200
Subject: [PATCH 02/11] Fixing warning in searching length-1 arrays

---
 parcels/xgrid.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/parcels/xgrid.py b/parcels/xgrid.py
index 2cfda507e..b307469e9 100644
--- a/parcels/xgrid.py
+++ b/parcels/xgrid.py
@@ -395,6 +395,8 @@ def _search_1d_array(
     float
         Barycentric coordinate.
     """
+    if len(arr) < 2:
+        return 0, 0.0
     i = np.argmin(arr <= x) - 1
     bcoord = (x - arr[i]) / (arr[i + 1] - arr[i])
     return i, bcoord
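Note on PATCH 02: with fewer than two coordinate values there is no cell to search, and without the guard the index wraps to -1 and the division hits a zero denominator, which is the NumPy runtime warning the commit subject refers to. A standalone sketch of the search logic (the function name and sample values below are illustrative, not part of the patch):

```python
import numpy as np


def search_1d(arr: np.ndarray, x: float) -> tuple[int, float]:
    """Sketch of _search_1d_array: locate x in an increasing 1-D coordinate array."""
    if len(arr) < 2:
        # A single-level axis has no cell to interpolate in; without this guard,
        # i would wrap to -1 and the denominator below would be arr[0] - arr[-1] == 0.
        return 0, 0.0
    i = int(np.argmin(arr <= x)) - 1  # index of last coordinate with arr[i] <= x
    bcoord = (x - arr[i]) / (arr[i + 1] - arr[i])  # 0 at arr[i], 1 at arr[i + 1]
    return i, bcoord


depth = np.array([0.0, 10.0, 25.0, 50.0])
print(search_1d(depth, 18.0))            # (1, 0.533...): 18 m lies between 10 m and 25 m
print(search_1d(np.array([0.0]), 18.0))  # (0, 0.0): degenerate single-level axis
```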
From abcb6b224a091de39ff59a2f366aafc504fb9fbd Mon Sep 17 00:00:00 2001
From: Erik van Sebille
Date: Thu, 17 Jul 2025 17:49:28 +0200
Subject: [PATCH 03/11] Reverting back to float for time and dt inside kernel loop

---
 parcels/kernel.py      | 24 +++++++++++-------------
 parcels/particleset.py | 30 +++++++++++++++++++-----------
 2 files changed, 30 insertions(+), 24 deletions(-)

diff --git a/parcels/kernel.py b/parcels/kernel.py
index 4b55a7634..6abb8d780 100644
--- a/parcels/kernel.py
+++ b/parcels/kernel.py
@@ -308,7 +308,7 @@ def execute(self, pset, endtime, dt):
         """Execute this Kernel over a ParticleSet for several timesteps."""
         pset._data["state"][:] = StatusCode.Evaluate
 
-        if abs(dt) < np.timedelta64(1000, "ns"):  # TODO still needed?
+        if abs(dt) < 1e-6:
             warnings.warn(
                 "'dt' is too small, causing numerical accuracy limit problems. Please choose a higher 'dt' and rather scale the 'time' axis of the field accordingly. (related issue #762)",
                 RuntimeWarning,
@@ -381,23 +381,21 @@ def evaluate_particle(self, p, endtime):
         while p.state in [StatusCode.Evaluate, StatusCode.Repeat]:
             pre_dt = p.dt
 
-            sign_dt = np.sign(p.dt).astype(int)
-            if sign_dt * (endtime - p.time_nextloop) <= np.timedelta64(0, "ns"):
+            sign_dt = np.sign(p.dt)
+            if sign_dt * p.time_nextloop >= sign_dt * endtime:
                 return p
 
-            # TODO implement below later again
-            # try:  # Use next_dt from AdvectionRK45 if it is set
-            #     if abs(endtime - p.time_nextloop) < abs(p.next_dt) - 1e-6:
-            #         p.next_dt = abs(endtime - p.time_nextloop) * sign_dt
-            # except AttributeError:
-            if abs(endtime - p.time_nextloop) <= abs(p.dt):
-                p.dt = abs(endtime - p.time_nextloop) * sign_dt
+            try:  # Use next_dt from AdvectionRK45 if it is set
+                if abs(endtime - p.time_nextloop) < abs(p.next_dt) - 1e-6:
+                    p.next_dt = abs(endtime - p.time_nextloop) * sign_dt
+            except KeyError:
+                if abs(endtime - p.time_nextloop) < abs(p.dt) - 1e-6:
+                    p.dt = abs(endtime - p.time_nextloop) * sign_dt
 
             res = self._pyfunc(p, self._fieldset, p.time_nextloop)
 
             if res is None:
-                if p.state == StatusCode.Success:
-                    if sign_dt * (p.time - endtime) > np.timedelta64(0, "ns"):
-                        p.state = StatusCode.Evaluate
+                if sign_dt * p.time < sign_dt * endtime and p.state == StatusCode.Success:
+                    p.state = StatusCode.Evaluate
             else:
                 p.state = res

diff --git a/parcels/particleset.py b/parcels/particleset.py
index 993741d42..6a9fadaca 100644
--- a/parcels/particleset.py
+++ b/parcels/particleset.py
@@ -109,9 +109,11 @@ def __init__(
         assert lon.size == lat.size and lon.size == depth.size, "lon, lat, depth don't all have the same lengths"
 
         if time is None or len(time) == 0:
-            time = np.datetime64("NaT", "ns")  # do not set a time yet (because sign_dt not known)
-        elif type(time[0]) in [np.datetime64, np.timedelta64]:
-            pass  # already in the right format
+            time = np.array([np.nan])  # do not set a time yet (because sign_dt not known)
+        elif type(time[0]) is np.datetime64:
+            time = time - self.fieldset.time_interval.left
+        elif type(time[0]) is np.timedelta64:
+            time = time / np.timedelta64(1, "s")
         else:
             raise TypeError("particle time must be a datetime, timedelta, or date object")
         time = np.repeat(time, lon.size) if time.size == 1 else time
@@ -141,7 +143,7 @@ def __init__(
             "lat": (["trajectory"], lat.astype(lonlatdepth_dtype)),
             "depth": (["trajectory"], depth.astype(lonlatdepth_dtype)),
             "time": (["trajectory"], time),
-            "dt": (["trajectory"], np.timedelta64(1, "ns") * np.ones(len(trajectory_ids))),
+            "dt": (["trajectory"], np.ones(len(trajectory_ids), dtype=np.float64)),
             "ei": (["trajectory", "ngrid"], np.zeros((len(trajectory_ids), len(fieldset.gridset)), dtype=np.int32)),
             "state": (["trajectory"], np.zeros((len(trajectory_ids)), dtype=np.int32)),
             "lon_nextloop": (["trajectory"], lon.astype(lonlatdepth_dtype)),
@@ -727,7 +729,10 @@ def execute(
         if (dt is not None) and (not isinstance(dt, np.timedelta64)):
             raise TypeError("dt must be a np.timedelta64 object")
         if dt is None or np.isnat(dt):
-            dt = np.timedelta64(1, "s")
+            dt = 1
+        else:
+            dt = dt / np.timedelta64(1, "s")
+        self._data["dt"][:] = dt
 
         sign_dt = np.sign(dt).astype(int)
         if sign_dt not in [-1, 1]:
@@ -745,7 +750,7 @@ def execute(
                 raise TypeError("The runtime must be a np.timedelta64 object")
 
         else:
-            if not np.isnat(self._data["time_nextloop"]).any():
+            if not np.isnan(self._data["time_nextloop"]).any():
                 if sign_dt > 0:
                     start_time = self._data["time_nextloop"].min().values
                 else:
@@ -778,8 +783,11 @@ def execute(
             else:
                 end_time = start_time + runtime * sign_dt
 
+        start_time = (start_time - self.fieldset.time_interval.left) / np.timedelta64(1, "s")
+        end_time = (end_time - self.fieldset.time_interval.left) / np.timedelta64(1, "s")
+
         # Set the time of the particles if it hadn't been set on initialisation
-        if np.isnat(self._data["time"]).any():
+        if np.isnan(self._data["time"]).any():
             self._data["time"][:] = start_time
             self._data["time_nextloop"][:] = start_time
@@ -790,7 +798,7 @@ def execute(
                 logger.info(f"Output files are stored in {output_file.fname}.")
 
         if verbose_progress:
-            pbar = tqdm(total=(end_time - start_time) / np.timedelta64(1, "s"), file=sys.stdout)
+            pbar = tqdm(total=(end_time - start_time), file=sys.stdout)
 
         next_output = outputdt if output_file else None
@@ -813,7 +821,7 @@ def execute(
                     next_output += outputdt
 
             if verbose_progress:
-                pbar.update(dt / np.timedelta64(1, "s"))
+                pbar.update(dt)
 
             time = next_time
@@ -835,13 +843,13 @@ def _warn_outputdt_release_desync(outputdt: float, starttime: float, release_tim
 def _warn_particle_times_outside_fieldset_time_bounds(release_times: np.ndarray, time: TimeInterval):
     if np.any(release_times):
-        if np.any(release_times < time.left):
+        if np.any(release_times < 0):
             warnings.warn(
                 "Some particles are set to be released outside the FieldSet's executable time domain.",
                 ParticleSetWarning,
                 stacklevel=2,
             )
-        if np.any(release_times > time.right):
+        if np.any(release_times > (time.right - time.left) / np.timedelta64(1, "s")):
             warnings.warn(
                 "Some particles are set to be released after the fieldset's last time and the fields are not constant in time.",
                 ParticleSetWarning,
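Note on PATCH 03: inside the kernel loop, particle `time` and `dt` are now plain floats measured in seconds from the left edge of the fieldset's time interval, rather than `np.datetime64`/`np.timedelta64` objects. A minimal illustration of that convention (the variable names and dates below are made up for this example):

```python
import numpy as np

t0 = np.datetime64("2025-01-01T00:00:00", "ns")       # fieldset.time_interval.left
release = np.datetime64("2025-01-01T06:00:00", "ns")  # a particle release time

# datetime64 -> float seconds relative to t0, as in ParticleSet.__init__
time_float = (release - t0) / np.timedelta64(1, "s")  # 21600.0

# timedelta64 -> float seconds, as in ParticleSet.execute
dt = np.timedelta64(30, "m") / np.timedelta64(1, "s")  # 1800.0

print(time_float, dt)  # all kernel-loop arithmetic now stays in float seconds
```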
From 5b6cd83836a4a256a87e73ab0d9b6b757a30fb39 Mon Sep 17 00:00:00 2001
From: Erik van Sebille
Date: Mon, 21 Jul 2025 12:34:10 +0200
Subject: [PATCH 04/11] Speeding up time_search by calling numpy arrays directly

---
 parcels/_index_search.py | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)

diff --git a/parcels/_index_search.py b/parcels/_index_search.py
index 4638b1848..dcb42e110 100644
--- a/parcels/_index_search.py
+++ b/parcels/_index_search.py
@@ -44,28 +44,28 @@ def _search_time_index(field: Field, time: datetime):
     if time not in field.time_interval:
         _raise_time_extrapolation_error(time, field=None)
 
-    time_index = field.data.time <= time
+    time_index = field.data.time.data <= time
 
     if time_index.all():
         # If given time > last known field time, use
         # the last field frame without interpolation
-        ti = len(field.data.time) - 1
-
+        ti = len(field.data.time.data) - 1
     elif np.logical_not(time_index).all():
         # If given time < any time in the field, use
         # the first field frame without interpolation
         ti = 0
     else:
         ti = int(time_index.argmin() - 1) if time_index.any() else 0
-    if len(field.data.time) == 1:
+
+    if len(field.data.time.data) == 1:
         tau = 0
-    elif ti == len(field.data.time) - 1:
+    elif ti == len(field.data.time.data) - 1:
         tau = 1
     else:
         tau = (
-            (time - field.data.time[ti]).dt.total_seconds()
-            / (field.data.time[ti + 1] - field.data.time[ti]).dt.total_seconds()
-            if field.data.time[ti] != field.data.time[ti + 1]
+            (time - field.data.time.data[ti])
+            / (field.data.time.data[ti + 1] - field.data.time.data[ti])
+            if field.data.time.data[ti] != field.data.time.data[ti + 1]
             else 0
         )
     return tau, ti

From 49dc841c9e74b8bdf5616351ad510b275fae3eee Mon Sep 17 00:00:00 2001
From: Erik van Sebille
Date: Mon, 21 Jul 2025 12:42:20 +0200
Subject: [PATCH 05/11] Also using numpy array of dataset in xgrid.search

---
 parcels/xgrid.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/parcels/xgrid.py b/parcels/xgrid.py
index b307469e9..b530ebcf2 100644
--- a/parcels/xgrid.py
+++ b/parcels/xgrid.py
@@ -212,11 +212,11 @@ def _gtype(self):
     def search(self, z, y, x, ei=None):
         ds = self.xgcm_grid._ds
 
-        zi, zeta = _search_1d_array(ds.depth.values, z)
+        zi, zeta = _search_1d_array(ds.depth.data, z)
 
         if ds.lon.ndim == 1:
-            yi, eta = _search_1d_array(ds.lat.values, y)
-            xi, xsi = _search_1d_array(ds.lon.values, x)
+            yi, eta = _search_1d_array(ds.lat.data, y)
+            xi, xsi = _search_1d_array(ds.lon.data, x)
             return {"Z": (zi, zeta), "Y": (yi, eta), "X": (xi, xsi)}
 
         yi, xi = None, None
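Note on PATCHES 04-05: both commits swap xarray coordinate access (`field.data.time`, `ds.lon.values`) for direct NumPy access via `.data`, since indexing a `DataArray` builds new xarray objects on every call while `.data[i]` is a plain array lookup. A self-contained sketch of the clamped time search after PATCH 04 (function name and sample axis are illustrative):

```python
import numpy as np


def search_time_index(time_axis: np.ndarray, t: np.datetime64) -> tuple[float, int]:
    """Sketch of _search_time_index on a raw datetime64 axis: returns (tau, ti)."""
    before = time_axis <= t
    if before.all():
        return 1.0, len(time_axis) - 1  # t at/after last snapshot: clamp to the end
    if not before.any():
        return 0.0, 0                   # t before first snapshot: clamp to the start
    ti = int(before.argmin()) - 1       # last snapshot with time <= t
    span = time_axis[ti + 1] - time_axis[ti]
    tau = (t - time_axis[ti]) / span if span > np.timedelta64(0, "ns") else 0.0
    return tau, ti


axis = np.arange("2025-01-01", "2025-01-06", dtype="datetime64[D]").astype("datetime64[ns]")
print(search_time_index(axis, np.datetime64("2025-01-02T12:00", "ns")))  # (0.5, 1)
```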
From 4e58b2a39c4d7ddc0307afa4b2355a36c3760527 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Mon, 21 Jul 2025 13:42:36 +0000
Subject: [PATCH 06/11] [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
---
 parcels/_index_search.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/parcels/_index_search.py b/parcels/_index_search.py
index dcb42e110..80cb8f570 100644
--- a/parcels/_index_search.py
+++ b/parcels/_index_search.py
@@ -63,8 +63,7 @@ def _search_time_index(field: Field, time: datetime):
         tau = 1
     else:
         tau = (
-            (time - field.data.time.data[ti])
-            / (field.data.time.data[ti + 1] - field.data.time.data[ti])
+            (time - field.data.time.data[ti]) / (field.data.time.data[ti + 1] - field.data.time.data[ti])
             if field.data.time.data[ti] != field.data.time.data[ti + 1]
             else 0
         )

From d8e6defc6705189ad5f47c1f49e54ebb0d671b05 Mon Sep 17 00:00:00 2001
From: Erik van Sebille
Date: Tue, 22 Jul 2025 17:18:59 +0200
Subject: [PATCH 07/11] Adding rudimentary computeTimeChunk functionality in particleset.execute

---
 parcels/field.py       | 18 +++++++++---------
 parcels/particleset.py | 10 ++++++++--
 2 files changed, 17 insertions(+), 11 deletions(-)

diff --git a/parcels/field.py b/parcels/field.py
index 68d6be88e..63a680e52 100644
--- a/parcels/field.py
+++ b/parcels/field.py
@@ -147,7 +147,7 @@ def __init__(
         _assert_compatible_combination(data, grid)
 
         self.name = name
-        self.data = data
+        self.data_full = data
         self.grid = grid
 
         try:
@@ -186,7 +186,7 @@ def __init__(
         else:
             raise ValueError("Unsupported mesh type in data array attributes. Choose either: 'spherical' or 'flat'")
 
-        if "time" not in self.data.dims:
+        if "time" not in data.dims:
             raise ValueError("Field is missing a 'time' dimension.")
 
     @property
@@ -201,27 +201,27 @@ def units(self, value):
 
     @property
     def xdim(self):
-        if type(self.data) is xr.DataArray:
+        if type(self.data_full) is xr.DataArray:
             return self.grid.xdim
         else:
             raise NotImplementedError("xdim not implemented for unstructured grids")
 
     @property
     def ydim(self):
-        if type(self.data) is xr.DataArray:
+        if type(self.data_full) is xr.DataArray:
             return self.grid.ydim
         else:
             raise NotImplementedError("ydim not implemented for unstructured grids")
 
     @property
     def zdim(self):
-        if type(self.data) is xr.DataArray:
+        if type(self.data_full) is xr.DataArray:
             return self.grid.zdim
         else:
-            if "nz1" in self.data.dims:
-                return self.data.sizes["nz1"]
-            elif "nz" in self.data.dims:
-                return self.data.sizes["nz"]
+            if "nz1" in self.data_full.dims:
+                return self.data_full.sizes["nz1"]
+            elif "nz" in self.data_full.dims:
+                return self.data_full.sizes["nz"]
             else:
                 return 0
 
diff --git a/parcels/particleset.py b/parcels/particleset.py
index f018a356d..79c9ad347 100644
--- a/parcels/particleset.py
+++ b/parcels/particleset.py
@@ -805,10 +805,16 @@ def execute(
 
         time = start_time
         while sign_dt * (time - end_time) < 0:
+
+            for fld in [self.fieldset.U, self.fieldset.V]:  # TODO generalise to all fields
+                ti = np.argmin(fld.data_full.time.data <= self._data["time_nextloop"][0]) - 1  # TODO also implement dt < 0
+                if not hasattr(fld, "data") or fld.data_full.time.data[ti] != fld.data.time.data[0]:
+                    fld.data = fld.data_full.isel({"time": slice(ti, ti + 2)}).load()
+
             if sign_dt > 0:
-                next_time = end_time  # TODO update to min(next_output, end_time) when ParticleFile works
+                next_time = min(time + dt, end_time)
             else:
-                next_time = end_time  # TODO update to max(next_output, end_time) when ParticleFile works
+                next_time = max(time - dt, end_time)
             res = self._kernel.execute(self, endtime=next_time, dt=dt)
             if res == StatusCode.StopAllExecution:
                 return StatusCode.StopAllExecution
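Note on PATCH 07: the field now keeps the full (lazy) dataset as `data_full` and holds only the two snapshots bracketing the current simulation time in memory as `data`, loading them on demand at the top of the `execute` loop. A sketch of that idea with a made-up 10-snapshot dataset (dimension names and sizes are illustrative):

```python
import numpy as np
import xarray as xr

times = np.arange("2025-01-01", "2025-01-11", dtype="datetime64[D]").astype("datetime64[ns]")
data_full = xr.DataArray(
    np.random.rand(10, 4, 4), dims=("time", "lat", "lon"), coords={"time": times}
)  # stand-in for a lazy, file-backed field

t = np.datetime64("2025-01-03T18:00", "ns")                # current particle time
ti = int(np.argmin(data_full.time.data <= t)) - 1          # snapshot at or before t
data = data_full.isel({"time": slice(ti, ti + 2)}).load()  # only 2 of 10 snapshots in memory
print(data.time.data)  # 2025-01-03 and 2025-01-04, bracketing t
```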
") @property @@ -201,27 +201,27 @@ def units(self, value): @property def xdim(self): - if type(self.data) is xr.DataArray: + if type(self.data_full) is xr.DataArray: return self.grid.xdim else: raise NotImplementedError("xdim not implemented for unstructured grids") @property def ydim(self): - if type(self.data) is xr.DataArray: + if type(self.data_full) is xr.DataArray: return self.grid.ydim else: raise NotImplementedError("ydim not implemented for unstructured grids") @property def zdim(self): - if type(self.data) is xr.DataArray: + if type(self.data_full) is xr.DataArray: return self.grid.zdim else: - if "nz1" in self.data.dims: - return self.data.sizes["nz1"] - elif "nz" in self.data.dims: - return self.data.sizes["nz"] + if "nz1" in self.data_full.dims: + return self.data_full.sizes["nz1"] + elif "nz" in self.data_full.dims: + return self.data_full.sizes["nz"] else: return 0 diff --git a/parcels/particleset.py b/parcels/particleset.py index f018a356d..79c9ad347 100644 --- a/parcels/particleset.py +++ b/parcels/particleset.py @@ -805,10 +805,16 @@ def execute( time = start_time while sign_dt * (time - end_time) < 0: + + for fld in [self.fieldset.U, self.fieldset.V]: # TODO generalise to all fields + ti = np.argmin(fld.data_full.time.data <= self._data["time_nextloop"][0]) - 1 # TODO also implement dt < 0 + if not hasattr(fld, "data") or fld.data_full.time.data[ti] != fld.data.time.data[0]: + fld.data = fld.data_full.isel({"time": slice(ti, ti + 2)}).load() + if sign_dt > 0: - next_time = end_time # TODO update to min(next_output, end_time) when ParticleFile works + next_time = min(time + dt, end_time) else: - next_time = end_time # TODO update to max(next_output, end_time) when ParticleFile works + next_time = max(time - dt, end_time) res = self._kernel.execute(self, endtime=next_time, dt=dt) if res == StatusCode.StopAllExecution: return StatusCode.StopAllExecution From f458a027ac6cbcc02f9b4af788a8eace482c298f Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Tue, 22 Jul 2025 18:02:16 +0200 Subject: [PATCH 08/11] Updating pragmatic loadtimechunk --- parcels/_index_search.py | 31 ++---------------------- parcels/application_kernels/advection.py | 2 +- parcels/particleset.py | 6 ++++- 3 files changed, 8 insertions(+), 31 deletions(-) diff --git a/parcels/_index_search.py b/parcels/_index_search.py index 80cb8f570..9f649fdd4 100644 --- a/parcels/_index_search.py +++ b/parcels/_index_search.py @@ -38,35 +38,8 @@ def _search_time_index(field: Field, time: datetime): Note that we normalize to either the first or the last index if the sampled value is outside the time value range. 
""" - if field.time_interval is None: - return 0, 0 - - if time not in field.time_interval: - _raise_time_extrapolation_error(time, field=None) - - time_index = field.data.time.data <= time - - if time_index.all(): - # If given time > last known field time, use - # the last field frame without interpolation - ti = len(field.data.time.data) - 1 - elif np.logical_not(time_index).all(): - # If given time < any time in the field, use - # the first field frame without interpolation - ti = 0 - else: - ti = int(time_index.argmin() - 1) if time_index.any() else 0 - - if len(field.data.time.data) == 1: - tau = 0 - elif ti == len(field.data.time.data) - 1: - tau = 1 - else: - tau = ( - (time - field.data.time.data[ti]) / (field.data.time.data[ti + 1] - field.data.time.data[ti]) - if field.data.time.data[ti] != field.data.time.data[ti + 1] - else 0 - ) + ti = np.argmin(field._time_float <= time) - 1 + tau = (time - field._time_float[ti]) / (field._time_float[ti + 1] - field._time_float[ti]) return tau, ti diff --git a/parcels/application_kernels/advection.py b/parcels/application_kernels/advection.py index dc5b801dc..03547cb04 100644 --- a/parcels/application_kernels/advection.py +++ b/parcels/application_kernels/advection.py @@ -97,7 +97,7 @@ def AdvectionRK4_3D_CROCO(particle, fieldset, time): # pragma: no cover def AdvectionEE(particle, fieldset, time): # pragma: no cover """Advection of particles using Explicit Euler (aka Euler Forward) integration.""" - dt = particle.dt / np.timedelta64(1, "s") # noqa TODO improve API for converting dt to seconds + dt = particle.dt (u1, v1) = fieldset.UV[particle] particle_dlon += u1 * dt # noqa particle_dlat += v1 * dt # noqa diff --git a/parcels/particleset.py b/parcels/particleset.py index 1495dc0cf..6f7e16a69 100644 --- a/parcels/particleset.py +++ b/parcels/particleset.py @@ -812,10 +812,14 @@ def execute( next_output = outputdt if output_file else None time = start_time + + for fld in [self.fieldset.U, self.fieldset.V]: # TODO generalise to all fields and move to better place + fld._time_float = (fld.data_full.time.data - fld.time_interval.left)/ np.timedelta64(1, "s") + while sign_dt * (time - end_time) < 0: for fld in [self.fieldset.U, self.fieldset.V]: # TODO generalise to all fields - ti = np.argmin(fld.data_full.time.data <= self._data["time_nextloop"][0]) - 1 # TODO also implement dt < 0 + ti = np.argmin(fld._time_float <= self._data["time_nextloop"][0]) - 1 # TODO also implement dt < 0 if not hasattr(fld, "data") or fld.data_full.time.data[ti] != fld.data.time.data[0]: fld.data = fld.data_full.isel({"time": slice(ti, ti + 2)}).load() From e6b4f050112ff089da749b6c62386f2d0fbf16dc Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 22 Jul 2025 16:02:30 +0000 Subject: [PATCH 09/11] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- parcels/_index_search.py | 1 - parcels/particleset.py | 3 +-- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/parcels/_index_search.py b/parcels/_index_search.py index 9f649fdd4..eabceca7f 100644 --- a/parcels/_index_search.py +++ b/parcels/_index_search.py @@ -15,7 +15,6 @@ _raise_field_out_of_bound_error, _raise_field_out_of_bound_surface_error, _raise_field_sampling_error, - _raise_time_extrapolation_error, ) from .basegrid import GridType diff --git a/parcels/particleset.py b/parcels/particleset.py index 6f7e16a69..075b60ceb 100644 --- a/parcels/particleset.py +++ b/parcels/particleset.py 
@@ -814,10 +814,9 @@ def execute(
         time = start_time
 
         for fld in [self.fieldset.U, self.fieldset.V]:  # TODO generalise to all fields and move to better place
-            fld._time_float = (fld.data_full.time.data - fld.time_interval.left)/ np.timedelta64(1, "s")
+            fld._time_float = (fld.data_full.time.data - fld.time_interval.left) / np.timedelta64(1, "s")
 
         while sign_dt * (time - end_time) < 0:
-
             for fld in [self.fieldset.U, self.fieldset.V]:  # TODO generalise to all fields
                 ti = np.argmin(fld._time_float <= self._data["time_nextloop"][0]) - 1  # TODO also implement dt < 0
                 if not hasattr(fld, "data") or fld.data_full.time.data[ti] != fld.data.time.data[0]:

From e77866f9ba6e4b55927d58dc003e938e6cdd2516 Mon Sep 17 00:00:00 2001
From: Erik van Sebille
Date: Wed, 23 Jul 2025 09:47:00 +0200
Subject: [PATCH 10/11] Cleaning up _load_timesteps

---
 parcels/field.py       | 13 +++++++++++++
 parcels/fieldset.py    |  7 +++++++
 parcels/particleset.py |  7 ++-----
 3 files changed, 22 insertions(+), 5 deletions(-)

diff --git a/parcels/field.py b/parcels/field.py
index 63a680e52..4a423b42b 100644
--- a/parcels/field.py
+++ b/parcels/field.py
@@ -242,6 +242,19 @@ def _check_velocitysampling(self):
                 stacklevel=2,
             )
 
+    def _load_timesteps(self, time):
+        """Load the appropriate timesteps of a field."""
+        ti = np.argmin(self.data_full.time.data <= time) - 1  # TODO also implement dt < 0
+        if not hasattr(self, "data"):
+            self.data = self.data_full.isel({"time": slice(ti, ti + 2)}).load()
+        elif self.data_full.time.data[ti] == self.data.time.data[1]:
+            self.data = xr.concat([self.data[1, :], self.data_full.isel({"time": ti + 1}).load()], dim="time")
+        elif self.data_full.time.data[ti] != self.data.time.data[0]:
+            self.data = self.data_full.isel({"time": slice(ti, ti + 2)}).load()
+        assert (
+            len(self.data.time) == 2
+        ), f"Field {self.name} has not been loaded correctly. Expected 2 timesteps, but got {len(self.data.time)}."
+
     def eval(self, time: datetime, z, y, x, particle=None, applyConversion=True):
         """Interpolate field values in space and time.
 
diff --git a/parcels/fieldset.py b/parcels/fieldset.py
index c423000fc..c67d0bd8c 100644
--- a/parcels/fieldset.py
+++ b/parcels/fieldset.py
@@ -82,6 +82,13 @@ def time_interval(self):
             return None
         return functools.reduce(lambda x, y: x.intersection(y), time_intervals)
 
+    def _load_timesteps(self, time):
+        """Load the appropriate timesteps of all fields in the fieldset."""
+        for fldname in self.fields:
+            field = self.fields[fldname]
+            if isinstance(field, Field):
+                field._load_timesteps(time)
+
     def add_field(self, field: Field, name: str | None = None):
         """Add a :class:`parcels.field.Field` object to the FieldSet.
 
diff --git a/parcels/particleset.py b/parcels/particleset.py
index 79c9ad347..1e235f0e5 100644
--- a/parcels/particleset.py
+++ b/parcels/particleset.py
@@ -805,11 +805,8 @@ def execute(
 
         time = start_time
         while sign_dt * (time - end_time) < 0:
-
-            for fld in [self.fieldset.U, self.fieldset.V]:  # TODO generalise to all fields
-                ti = np.argmin(fld.data_full.time.data <= self._data["time_nextloop"][0]) - 1  # TODO also implement dt < 0
-                if not hasattr(fld, "data") or fld.data_full.time.data[ti] != fld.data.time.data[0]:
-                    fld.data = fld.data_full.isel({"time": slice(ti, ti + 2)}).load()
+            # Load the appropriate timesteps of the fieldset
+            self.fieldset._load_timesteps(self._data["time_nextloop"][0])
 
             if sign_dt > 0:
                 next_time = min(time + dt, end_time)
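Note on PATCH 10: when the simulation crosses into the next snapshot interval, `_load_timesteps` reuses the snapshot already in memory and reads only one new one (the `xr.concat` branch), instead of reloading both. A sketch of that rolling window on a made-up 1x1 grid, using slices rather than the patch's scalar indexing to keep the `time` dimension explicit:

```python
import numpy as np
import xarray as xr

times = np.arange("2025-01-01", "2025-01-06", dtype="datetime64[D]").astype("datetime64[ns]")
data_full = xr.DataArray(
    np.arange(5.0).reshape(5, 1, 1), dims=("time", "lat", "lon"), coords={"time": times}
)

data = data_full.isel({"time": slice(0, 2)}).load()  # holding snapshots 0 and 1
t = np.datetime64("2025-01-02T06:00", "ns")          # time has advanced into interval [1, 2)
ti = int(np.argmin(data_full.time.data <= t)) - 1    # -> 1

if data_full.time.data[ti] == data.time.data[1]:
    # advance by one: keep loaded snapshot 1, read only snapshot 2 from "disk"
    data = xr.concat(
        [data.isel({"time": slice(1, 2)}), data_full.isel({"time": slice(ti + 1, ti + 2)}).load()],
        dim="time",
    )
print(data.time.data)  # 2025-01-02 and 2025-01-03: a one-step roll of the window
```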
From 5a7d7d2cb017cbe44e7bd454269b6519e6507c9e Mon Sep 17 00:00:00 2001
From: Erik van Sebille
Date: Wed, 23 Jul 2025 09:47:56 +0200
Subject: [PATCH 11/11] Adding checks to index_search to validate bcoord between 0 and 1

---
 parcels/_index_search.py | 2 ++
 parcels/xgrid.py         | 2 ++
 2 files changed, 4 insertions(+)

diff --git a/parcels/_index_search.py b/parcels/_index_search.py
index 80cb8f570..4d470488e 100644
--- a/parcels/_index_search.py
+++ b/parcels/_index_search.py
@@ -67,6 +67,8 @@ def _search_time_index(field: Field, time: datetime):
             if field.data.time.data[ti] != field.data.time.data[ti + 1]
             else 0
         )
+    if tau < 0 or tau > 1:  # TODO only for debugging; test can go?
+        raise ValueError(f"Time {time} is out of bounds for field time data {field.data.time.data}.")
     return tau, ti

diff --git a/parcels/xgrid.py b/parcels/xgrid.py
index b530ebcf2..6ee905906 100644
--- a/parcels/xgrid.py
+++ b/parcels/xgrid.py
@@ -399,6 +399,8 @@ def _search_1d_array(
         return 0, 0.0
     i = np.argmin(arr <= x) - 1
     bcoord = (x - arr[i]) / (arr[i + 1] - arr[i])
+    if bcoord < 0 or bcoord > 1:  # TODO only for debugging; test can go?
+        raise ValueError(f"Position {x} is out of bounds for array {arr}.")
     return i, bcoord
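Note on PATCH 11: a barycentric coordinate outside [0, 1] means the queried position does not actually lie in the cell the index search returned, typically because the query fell outside the array and the index wrapped around; the new checks turn that silent wraparound into a hard error. Illustration (sample array and values are made up):

```python
import numpy as np

arr = np.array([0.0, 10.0, 25.0, 50.0])
for x in (18.0, 60.0):
    i = int(np.argmin(arr <= x)) - 1
    bcoord = (x - arr[i]) / (arr[i + 1] - arr[i])
    print(x, i, round(bcoord, 3), 0.0 <= bcoord <= 1.0)
# 18.0 -> i=1,  bcoord=0.533, True  (valid: inside the array)
# 60.0 -> i=-1, bcoord=-0.2,  False (x beyond the last coordinate; index wrapped)
```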