diff --git a/docs/user-guide/assignments/Research_proposal_intro.ipynb b/docs/user-guide/assignments/Research_proposal_intro.ipynb index 88f54bc8..45f65947 100644 --- a/docs/user-guide/assignments/Research_proposal_intro.ipynb +++ b/docs/user-guide/assignments/Research_proposal_intro.ipynb @@ -150,9 +150,15 @@ "\n", "Finally, fit your equipment in the 20-foot container. The container will be sent to your port of departure ahead of time with a cargo boat, so make sure you are packed in time for this transfer. Remember there are no shops at sea, so think carefully and plan ahead. \n", "\n", - "![Equipment preparation NIOZ](https://www.nioz.nl/application/files/9116/7500/3457/2023-01-16-packing.jpg) \n", - "![Equipment loading](https://www.nioz.nl/application/files/7416/7810/2265/2023-03-06-container-shifting.jpg) " + "\n", + "" ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [] + } ], "metadata": { diff --git a/docs/user-guide/documentation/pre_download_data.md b/docs/user-guide/documentation/pre_download_data.md index fc55f339..f37912e9 100644 --- a/docs/user-guide/documentation/pre_download_data.md +++ b/docs/user-guide/documentation/pre_download_data.md @@ -20,6 +20,10 @@ In addition, all pre-downloaded data must be split into separate files per times **Monthly data**: when using monthly data, ensure that your final .nc file download is for the month *after* your expedition schedule end date. This is to ensure that a Parcels FieldSet can be generated under-the-hood which fully covers the expedition period. For example, if your expedition runs from 1st May to 15th May, your final monthly data file should be in June. Daily data files only need to cover the expedition period exactly. ``` +```{note} +**Argo and Drifter data**: if using Argo floats or Drifters in your expedition, ensure that: 1) the temporal extent of the downloaded data covers the full *lifetime* of the instruments, not just the expedition period, and 2) the spatial bounds of the downloaded data also account for the likely drift distance of the instruments over their lifetimes. Otherwise, simulations will end prematurely (out-of-bounds errors) when the data runs out. +``` + Further, VirtualShip expects pre-downloaded data to be organised in a specific directory & filename structure within the specified local data directory. The expected structure is as outlined in the subsequent sections.
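As a quick way to confirm the coverage described in the note above, the pre-downloaded files can be inspected before running the expedition. The snippet below is an illustrative sketch only and is not part of this change set: the file path, the coordinate names (`time`, `latitude`, `longitude`) and the example expedition window, lifetime and bounds are all assumptions to be adapted to the product actually downloaded.

```python
from datetime import datetime, timedelta

import numpy as np
import xarray as xr

# Assumed expedition window and drifter lifetime (should match expedition.yaml,
# e.g. lifetime_days: 42.0).
expedition_start = datetime(2023, 5, 1)
expedition_end = datetime(2023, 5, 15)
drifter_lifetime = timedelta(days=42)

# Hypothetical pre-downloaded file; adjust the path and coordinate names to the
# product you actually downloaded.
ds = xr.open_dataset("data/physical/uo_2023-05.nc")

# Temporal check: data must span the expedition plus the instrument lifetime.
t_min, t_max = ds["time"].values.min(), ds["time"].values.max()
assert t_min <= np.datetime64(expedition_start), "data starts after the expedition start"
assert t_max >= np.datetime64(expedition_end + drifter_lifetime), (
    "data ends before the drifter lifetime runs out"
)

# Spatial check against the widest box the drifters are expected to reach (assumed bounds).
min_lat, max_lat, min_lon, max_lon = 40.0, 55.0, -30.0, -5.0
assert float(ds["latitude"].min()) <= min_lat and float(ds["latitude"].max()) >= max_lat
assert float(ds["longitude"].min()) <= min_lon and float(ds["longitude"].max()) >= max_lon

print("Pre-downloaded data covers the expedition period, instrument lifetime and drift area.")
```

If any of these checks fail, extend the download request in time or space rather than trimming the instrument lifetimes.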
#### Directory structure diff --git a/src/virtualship/cli/_plan.py b/src/virtualship/cli/_plan.py index 08845e0d..81c8f857 100644 --- a/src/virtualship/cli/_plan.py +++ b/src/virtualship/cli/_plan.py @@ -22,10 +22,8 @@ from virtualship.cli.validator_utils import ( get_field_type, group_validators, - is_valid_depth, is_valid_lat, is_valid_lon, - is_valid_timestr, type_to_textual, ) from virtualship.errors import UnexpectedError, UserError @@ -40,12 +38,10 @@ Location, ShipConfig, ShipUnderwaterSTConfig, - SpatialRange, - TimeRange, Waypoint, XBTConfig, ) -from virtualship.utils import EXPEDITION +from virtualship.utils import EXPEDITION, _get_waypoint_latlons UNEXPECTED_MSG_ONSAVE = ( "Please ensure that:\n" @@ -142,6 +138,7 @@ def log_exception_to_file( {"name": "cycle_days"}, {"name": "drift_days"}, {"name": "stationkeeping_time", "minutes": True}, + {"name": "lifetime", "days": True}, ], }, "drifter_config": { @@ -149,7 +146,7 @@ def log_exception_to_file( "title": "Drifter", "attributes": [ {"name": "depth_meter"}, - {"name": "lifetime", "minutes": True}, + {"name": "lifetime", "days": True}, {"name": "stationkeeping_time", "minutes": True}, ], }, @@ -249,7 +246,6 @@ def compose(self) -> ComposeResult: for instrument_name, info in INSTRUMENT_FIELDS.items(): config_class = info["class"] attributes = info["attributes"] - # instrument-specific configs now live under instruments_config config_instance = getattr( self.expedition.instruments_config, instrument_name, None ) @@ -268,7 +264,10 @@ def compose(self) -> ComposeResult: with Container(classes="instrument-config"): for attr_meta in attributes: attr = attr_meta["name"] - is_minutes = attr_meta.get("minutes", False) + is_minutes, is_days = ( + attr_meta.get("minutes", False), + attr_meta.get("days", False), + ) validators = group_validators(config_class, attr) if config_instance: raw_value = getattr(config_instance, attr, "") @@ -279,16 +278,23 @@ def compose(self) -> ComposeResult: ) except AttributeError: value = str(raw_value) + elif is_days and raw_value != "": + try: + value = str( + raw_value.total_seconds() / 86400.0 + ) + except AttributeError: + value = str(raw_value) else: value = str(raw_value) else: value = "" label = f"{attr.replace('_', ' ').title()}:" - yield Label( - label - if not is_minutes - else label.replace(":", " Minutes:") - ) + if is_minutes: + label = label.replace(":", " Minutes:") + elif is_days: + label = label.replace(":", " Days:") + yield Label(label) yield Input( id=f"{instrument_name}_{attr}", type=type_to_textual( @@ -331,181 +337,6 @@ def compose(self) -> ComposeResult: yield VerticalScroll(id="waypoint_list", classes="waypoint-list") - # SECTION: "Space-Time Region" - - with Collapsible( - title="[b]Space-Time Region[/b] (advanced users only)", - collapsed=True, - ): - if self.expedition.schedule.space_time_region: - str_data = self.expedition.schedule.space_time_region - - yield Label("Minimum Latitude:") - yield Input( - id="min_lat", - value=str(str_data.spatial_range.minimum_latitude) - if str_data.spatial_range.minimum_latitude - else "", - validators=[ - Function( - is_valid_lat, - f"INVALID: value must be {is_valid_lat.__doc__.lower()}", - ) - ], - type="number", - placeholder="°N", - ) - yield Label( - "", - id="validation-failure-label-min_lat", - classes="-hidden validation-failure", - ) - - yield Label("Maximum Latitude:") - yield Input( - id="max_lat", - value=str(str_data.spatial_range.maximum_latitude), - validators=[ - Function( - is_valid_lat, - f"INVALID: value must be 
{is_valid_lat.__doc__.lower()}", - ) - ], - type="number", - placeholder="°N", - ) - yield Label( - "", - id="validation-failure-label-max_lat", - classes="-hidden validation-failure", - ) - - yield Label("Minimum Longitude:") - yield Input( - id="min_lon", - value=str(str_data.spatial_range.minimum_longitude), - validators=[ - Function( - is_valid_lon, - f"INVALID: value must be {is_valid_lon.__doc__.lower()}", - ) - ], - type="number", - placeholder="°E", - ) - yield Label( - "", - id="validation-failure-label-min_lon", - classes="-hidden validation-failure", - ) - - yield Label("Maximum Longitude:") - yield Input( - id="max_lon", - value=str(str_data.spatial_range.maximum_longitude), - validators=[ - Function( - is_valid_lon, - f"INVALID: value must be {is_valid_lon.__doc__.lower()}", - ) - ], - type="number", - placeholder="°E", - ) - yield Label( - "", - id="validation-failure-label-max_lon", - classes="-hidden validation-failure", - ) - - yield Label("Minimum Depth (meters):") - yield Input( - id="min_depth", - value=str(str_data.spatial_range.minimum_depth), - validators=[ - Function( - is_valid_depth, - f"INVALID: value must be {is_valid_depth.__doc__.lower()}", - ) - ], - type="number", - placeholder="m", - ) - yield Label( - "", - id="validation-failure-label-min_depth", - classes="-hidden validation-failure", - ) - - yield Label("Maximum Depth (meters):") - yield Input( - id="max_depth", - value=str(str_data.spatial_range.maximum_depth), - validators=[ - Function( - is_valid_depth, - f"INVALID: value must be {is_valid_depth.__doc__.lower()}", - ) - ], - type="number", - placeholder="m", - ) - yield Label( - "", - id="validation-failure-label-max_depth", - classes="-hidden validation-failure", - ) - - yield Label( - "Start Time (will be auto determined from waypoints if left blank):" - ) - yield Input( - id="start_time", - placeholder="YYYY-MM-DD hh:mm:ss", - value=( - str(str_data.time_range.start_time) - if str_data.time_range and str_data.time_range.start_time - else "" - ), - validators=[ - Function( - is_valid_timestr, - f"INVALID: value must be {is_valid_timestr.__doc__.lower()}", - ) - ], - type="text", - ) - yield Label( - "", - id="validation-failure-label-start_time", - classes="-hidden validation-failure", - ) - - yield Label( - "End Time (will be auto determined from waypoints if left blank):" - ) - yield Input( - id="end_time", - placeholder="YYYY-MM-DD hh:mm:ss", - value=( - str(str_data.time_range.end_time) - if str_data.time_range and str_data.time_range.end_time - else "" - ), - validators=[ - Function( - is_valid_timestr, - f"INVALID: value must be {is_valid_timestr.__doc__.lower()}", - ) - ], - type="text", - ) - yield Label( - "", - id="validation-failure-label-end_time", - classes="-hidden validation-failure", - ) - except Exception as e: raise UnexpectedError(unexpected_msg_compose(e)) from None @@ -571,11 +402,14 @@ def _update_instrument_configs(self): for attr_meta in attributes: attr = attr_meta["name"] is_minutes = attr_meta.get("minutes", False) + is_days = attr_meta.get("days", False) input_id = f"{instrument_name}_{attr}" value = self.query_one(f"#{input_id}").value field_type = get_field_type(config_class, attr) if is_minutes and field_type is datetime.timedelta: value = datetime.timedelta(minutes=float(value)) + elif is_days and field_type is datetime.timedelta: + value = datetime.timedelta(days=float(value)) else: value = field_type(value) kwargs[attr] = value @@ -592,24 +426,6 @@ def _update_instrument_configs(self): ) def _update_schedule(self): 
- start_time_input = self.query_one("#start_time").value - end_time_input = self.query_one("#end_time").value - waypoint_times = [ - wp.time - for wp in self.expedition.schedule.waypoints - if hasattr(wp, "time") and wp.time - ] - if not start_time_input and waypoint_times: - start_time = min(waypoint_times) - else: - start_time = start_time_input - if not end_time_input and waypoint_times: - end_time = max(waypoint_times) + datetime.timedelta(minutes=60480.0) - else: - end_time = end_time_input - time_range = TimeRange(start_time=start_time, end_time=end_time) - self.expedition.schedule.space_time_region.time_range = time_range - for i, wp in enumerate(self.expedition.schedule.waypoints): wp.location = Location( latitude=float(self.query_one(f"#wp{i}_lat").value), @@ -634,57 +450,6 @@ def _update_schedule(self): elif switch_on: wp.instrument.append(instrument) - # take min/max lat/lon to be most extreme values of waypoints or space_time_region inputs (so as to cover possibility of user edits in either place) - # also prevents situation where e.g. user defines a space time region inconsistent with waypoint locations and vice versa (warning also provided) - waypoint_lats = [ - wp.location.latitude for wp in self.expedition.schedule.waypoints - ] - waypoint_lons = [ - wp.location.longitude for wp in self.expedition.schedule.waypoints - ] - wp_min_lat, wp_max_lat = ( - min(waypoint_lats) if waypoint_lats else -90.0, - max(waypoint_lats) if waypoint_lats else 90.0, - ) - wp_min_lon, wp_max_lon = ( - min(waypoint_lons) if waypoint_lons else -180.0, - max(waypoint_lons) if waypoint_lons else 180.0, - ) - - st_reg_min_lat = float(self.query_one("#min_lat").value) - st_reg_max_lat = float(self.query_one("#max_lat").value) - st_reg_min_lon = float(self.query_one("#min_lon").value) - st_reg_max_lon = float(self.query_one("#max_lon").value) - - min_lat = min(wp_min_lat, st_reg_min_lat) - max_lat = max(wp_max_lat, st_reg_max_lat) - min_lon = min(wp_min_lon, st_reg_min_lon) - max_lon = max(wp_max_lon, st_reg_max_lon) - - spatial_range = SpatialRange( - minimum_longitude=min_lon, - maximum_longitude=max_lon, - minimum_latitude=min_lat, - maximum_latitude=max_lat, - minimum_depth=self.query_one("#min_depth").value, - maximum_depth=self.query_one("#max_depth").value, - ) - self.expedition.schedule.space_time_region.spatial_range = spatial_range - - # provide warning if user defines a space time region inconsistent with waypoint locations - if ( - (wp_min_lat < st_reg_min_lat) - or (wp_max_lat > st_reg_max_lat) - or (wp_min_lon < st_reg_min_lon) - or (wp_max_lon > st_reg_max_lon) - ): - self.notify( - "[b]WARNING[/b]. One or more waypoint locations lie outside the defined space-time region. Take care if manually adjusting the space-time region." 
- "\n\nThe space-time region will be automatically adjusted on saving to include all waypoint locations.", - severity="warning", - timeout=10, - ) - @on(Input.Changed) def show_invalid_reasons(self, event: Input.Changed) -> None: input_id = event.input.id @@ -1089,9 +854,11 @@ def save_pressed(self) -> None: self.sync_ui_waypoints() # call to ensure waypoint inputs are synced # verify schedule + wp_lats, wp_lons = _get_waypoint_latlons( + expedition_editor.expedition.schedule.waypoints + ) expedition_editor.expedition.schedule.verify( ship_speed_value, - check_space_time_region=True, ignore_land_test=True, ) diff --git a/src/virtualship/cli/validator_utils.py b/src/virtualship/cli/validator_utils.py index 402e48b1..4aaf9389 100644 --- a/src/virtualship/cli/validator_utils.py +++ b/src/virtualship/cli/validator_utils.py @@ -41,35 +41,6 @@ def is_valid_lon(value: str) -> bool: return -180 < v < 360 -@require_docstring -def is_valid_depth(value: str) -> bool: - """Float.""" - try: - v = float(value) - except ValueError: - return None - - # NOTE: depth model in space_time_region.py ONLY specifies that depth must be float (and no conditions < 0) - # NOTE: therefore, this condition is carried forward here to match what currently exists - # NOTE: however, there is a TODO in space_time_region.py to add conditions as Pydantic Field - # TODO: update validator here if/when depth model is updated in space_time_region.py - return isinstance(v, float) - - -@require_docstring -def is_valid_timestr(value: str) -> bool: - """Format YYYY-MM-DD hh:mm:ss.""" - if ( - not value.strip() - ): # return as valid if blank, UI logic will auto fill on save if so - return True - try: - datetime.datetime.strptime(value, "%Y-%m-%d %H:%M:%S") - return True - except Exception: - return False - - # SHIP CONFIG INPUTS VALIDATION FIELD_CONSTRAINT_ATTRS = ( diff --git a/src/virtualship/instruments/adcp.py b/src/virtualship/instruments/adcp.py index 2a761e14..17797a41 100644 --- a/src/virtualship/instruments/adcp.py +++ b/src/virtualship/instruments/adcp.py @@ -57,6 +57,9 @@ class ADCPInstrument(Instrument): def __init__(self, expedition, from_data): """Initialize ADCPInstrument.""" variables = {"U": "uo", "V": "vo"} + limit_spec = { + "spatial": True + } # spatial limits; lat/lon constrained to waypoint locations + buffer super().__init__( expedition, @@ -65,7 +68,7 @@ def __init__(self, expedition, from_data): allow_time_extrapolation=True, verbose_progress=False, spacetime_buffer_size=None, - limit_spec=None, + limit_spec=limit_spec, from_data=from_data, ) diff --git a/src/virtualship/instruments/argo_float.py b/src/virtualship/instruments/argo_float.py index 2532907a..1c697852 100644 --- a/src/virtualship/instruments/argo_float.py +++ b/src/virtualship/instruments/argo_float.py @@ -167,7 +167,11 @@ def __init__(self, expedition, from_data): variables = {"U": "uo", "V": "vo", "S": "so", "T": "thetao"} spacetime_buffer_size = { "latlon": 3.0, # [degrees] - "time": 21.0, # [days] + "time": expedition.instruments_config.argo_float_config.lifetime.total_seconds() + / (24 * 3600), # [days] + } + limit_spec = { + "spatial": True, # spatial limits; lat/lon constrained to waypoint locations + buffer } super().__init__( @@ -177,7 +181,7 @@ def __init__(self, expedition, from_data): allow_time_extrapolation=False, verbose_progress=True, spacetime_buffer_size=spacetime_buffer_size, - limit_spec=None, + limit_spec=limit_spec, from_data=from_data, ) @@ -185,7 +189,6 @@ def simulate(self, measurements, out_path) -> None: """Simulate Argo 
float measurements.""" DT = 10.0 # dt of Argo float simulation integrator OUTPUT_DT = timedelta(minutes=5) - ENDTIME = None if len(measurements) == 0: print( @@ -235,15 +238,8 @@ def simulate(self, measurements, out_path) -> None: chunks=[len(argo_float_particleset), 100], ) - # get earliest between fieldset end time and provide end time - fieldset_endtime = fieldset.time_origin.fulltime(fieldset.U.grid.time_full[-1]) - if ENDTIME is None: - actual_endtime = fieldset_endtime - elif ENDTIME > fieldset_endtime: - print("WARN: Requested end time later than fieldset end time.") - actual_endtime = fieldset_endtime - else: - actual_endtime = np.timedelta64(ENDTIME) + # endtime + endtime = fieldset.time_origin.fulltime(fieldset.U.grid.time_full[-1]) # execute simulation argo_float_particleset.execute( @@ -253,7 +249,7 @@ def simulate(self, measurements, out_path) -> None: _keep_at_surface, _check_error, ], - endtime=actual_endtime, + endtime=endtime, dt=DT, output_file=out_file, verbose_progress=self.verbose_progress, diff --git a/src/virtualship/instruments/base.py b/src/virtualship/instruments/base.py index 22b0b54a..984e4abf 100644 --- a/src/virtualship/instruments/base.py +++ b/src/virtualship/instruments/base.py @@ -3,6 +3,7 @@ import abc from collections import OrderedDict from datetime import timedelta +from itertools import pairwise from pathlib import Path from typing import TYPE_CHECKING @@ -17,6 +18,7 @@ _find_files_in_timerange, _find_nc_file_with_variable, _get_bathy_data, + _get_waypoint_latlons, _select_product_id, ship_spinner, ) @@ -56,6 +58,19 @@ def __init__( self.spacetime_buffer_size = spacetime_buffer_size self.limit_spec = limit_spec + wp_lats, wp_lons = _get_waypoint_latlons(expedition.schedule.waypoints) + wp_times = [ + wp.time for wp in expedition.schedule.waypoints if wp.time is not None + ] + assert all(earlier <= later for earlier, later in pairwise(wp_times)), ( + "Waypoint times are not in ascending order" + ) + self.wp_times = wp_times + + self.min_time, self.max_time = wp_times[0], wp_times[-1] + self.min_lat, self.max_lat = min(wp_lats), max(wp_lats) + self.min_lon, self.max_lon = min(wp_lons), max(wp_lons) + def load_input_data(self) -> FieldSet: """Load and return the input data as a FieldSet for the instrument.""" try: @@ -76,10 +91,10 @@ def load_input_data(self) -> FieldSet: # bathymetry data if self.add_bathymetry: bathymetry_field = _get_bathy_data( - self.expedition.schedule.space_time_region, - latlon_buffer=self.spacetime_buffer_size.get("latlon") - if self.spacetime_buffer_size - else None, + self.min_lat, + self.max_lat, + self.min_lon, + self.max_lon, from_data=self.from_data, ).bathymetry bathymetry_field.data = -bathymetry_field.data @@ -115,36 +130,39 @@ def execute(self, measurements: list, out_path: str | Path) -> None: def _get_copernicus_ds( self, + time_buffer: float | None, physical: bool, var: str, ) -> xr.Dataset: """Get Copernicus Marine dataset for direct ingestion.""" product_id = _select_product_id( physical=physical, - schedule_start=self.expedition.schedule.space_time_region.time_range.start_time, - schedule_end=self.expedition.schedule.space_time_region.time_range.end_time, + schedule_start=self.min_time, + schedule_end=self.max_time, variable=var if not physical else None, ) - latlon_buffer = self._get_spec_value("buffer", "latlon", 0.0) - time_buffer = self._get_spec_value("buffer", "time", 0.0) + latlon_buffer = self._get_spec_value( + "buffer", "latlon", 0.25 + ) # [degrees]; default 0.25 deg buffer to ensure coverage in field 
cell edge cases depth_min = self._get_spec_value("limit", "depth_min", None) depth_max = self._get_spec_value("limit", "depth_max", None) + spatial_constraint = self._get_spec_value("limit", "spatial", True) + + min_lon_bound = self.min_lon - latlon_buffer if spatial_constraint else None + max_lon_bound = self.max_lon + latlon_buffer if spatial_constraint else None + min_lat_bound = self.min_lat - latlon_buffer if spatial_constraint else None + max_lat_bound = self.max_lat + latlon_buffer if spatial_constraint else None return copernicusmarine.open_dataset( dataset_id=product_id, - minimum_longitude=self.expedition.schedule.space_time_region.spatial_range.minimum_longitude - - latlon_buffer, - maximum_longitude=self.expedition.schedule.space_time_region.spatial_range.maximum_longitude - + latlon_buffer, - minimum_latitude=self.expedition.schedule.space_time_region.spatial_range.minimum_latitude - - latlon_buffer, - maximum_latitude=self.expedition.schedule.space_time_region.spatial_range.maximum_latitude - + latlon_buffer, + minimum_longitude=min_lon_bound, + maximum_longitude=max_lon_bound, + minimum_latitude=min_lat_bound, + maximum_latitude=max_lat_bound, variables=[var], - start_datetime=self.expedition.schedule.space_time_region.time_range.start_time, - end_datetime=self.expedition.schedule.space_time_region.time_range.end_time - + timedelta(days=time_buffer), + start_datetime=self.min_time, + end_datetime=self.max_time + timedelta(days=time_buffer), minimum_depth=depth_min, maximum_depth=depth_max, coordinates_selection_method="outside", @@ -159,6 +177,8 @@ def _generate_fieldset(self) -> FieldSet: fieldsets_list = [] keys = list(self.variables.keys()) + time_buffer = self._get_spec_value("buffer", "time", 0.0) + for key in keys: var = self.variables[key] if self.from_data is not None: # load from local data @@ -168,17 +188,10 @@ def _generate_fieldset(self) -> FieldSet: else: data_dir = self.from_data.joinpath("bgc") - schedule_start = ( - self.expedition.schedule.space_time_region.time_range.start_time - ) - schedule_end = ( - self.expedition.schedule.space_time_region.time_range.end_time - ) - files = _find_files_in_timerange( data_dir, - schedule_start, - schedule_end, + self.min_time, + self.max_time + timedelta(days=time_buffer), ) _, full_var_name = _find_nc_file_with_variable( @@ -197,7 +210,11 @@ def _generate_fieldset(self) -> FieldSet: ) else: # stream via Copernicus Marine Service physical = var in COPERNICUSMARINE_PHYS_VARIABLES - ds = self._get_copernicus_ds(physical=physical, var=var) + ds = self._get_copernicus_ds( + time_buffer, + physical=physical, + var=var, + ) fs = FieldSet.from_xarray_dataset( ds, {key: var}, self.dimensions, mesh="spherical" ) diff --git a/src/virtualship/instruments/ctd.py b/src/virtualship/instruments/ctd.py index 73248cf9..eb780d3e 100644 --- a/src/virtualship/instruments/ctd.py +++ b/src/virtualship/instruments/ctd.py @@ -82,6 +82,9 @@ class CTDInstrument(Instrument): def __init__(self, expedition, from_data): """Initialize CTDInstrument.""" variables = {"S": "so", "T": "thetao"} + limit_spec = { + "spatial": True + } # spatial limits; lat/lon constrained to waypoint locations + buffer super().__init__( expedition, @@ -90,7 +93,7 @@ def __init__(self, expedition, from_data): allow_time_extrapolation=True, verbose_progress=False, spacetime_buffer_size=None, - limit_spec=None, + limit_spec=limit_spec, from_data=from_data, ) diff --git a/src/virtualship/instruments/ctd_bgc.py b/src/virtualship/instruments/ctd_bgc.py index fab9e07b..221cfa12 
100644 --- a/src/virtualship/instruments/ctd_bgc.py +++ b/src/virtualship/instruments/ctd_bgc.py @@ -112,6 +112,10 @@ def __init__(self, expedition, from_data): "phyc": "phyc", "nppv": "nppv", } + limit_spec = { + "spatial": True + } # spatial limits; lat/lon constrained to waypoint locations + buffer + super().__init__( expedition, variables, @@ -119,7 +123,7 @@ def __init__(self, expedition, from_data): allow_time_extrapolation=True, verbose_progress=False, spacetime_buffer_size=None, - limit_spec=None, + limit_spec=limit_spec, from_data=from_data, ) diff --git a/src/virtualship/instruments/drifter.py b/src/virtualship/instruments/drifter.py index 8c531455..c96b2d86 100644 --- a/src/virtualship/instruments/drifter.py +++ b/src/virtualship/instruments/drifter.py @@ -67,10 +67,12 @@ def __init__(self, expedition, from_data): """Initialize DrifterInstrument.""" variables = {"U": "uo", "V": "vo", "T": "thetao"} spacetime_buffer_size = { - "latlon": 6.0, # [degrees] - "time": 21.0, # [days] + "latlon": None, + "time": expedition.instruments_config.drifter_config.lifetime.total_seconds() + / (24 * 3600), # [days] } limit_spec = { + "spatial": False, # no spatial limits; generate global fieldset "depth_min": 1.0, # [meters] "depth_max": 1.0, # [meters] } @@ -90,7 +92,6 @@ def simulate(self, measurements, out_path) -> None: """Simulate Drifter measurements.""" OUTPUT_DT = timedelta(hours=5) DT = timedelta(minutes=5) - ENDTIME = None if len(measurements) == 0: print( @@ -132,29 +133,14 @@ def simulate(self, measurements, out_path) -> None: chunks=[len(drifter_particleset), 100], ) - # get earliest between fieldset end time and prescribed end time - fieldset_endtime = fieldset.time_origin.fulltime(fieldset.U.grid.time_full[-1]) - if ENDTIME is None: - actual_endtime = fieldset_endtime - elif ENDTIME > fieldset_endtime: - print("WARN: Requested end time later than fieldset end time.") - actual_endtime = fieldset_endtime - else: - actual_endtime = np.timedelta64(ENDTIME) + # determine end time for simulation, from fieldset (which itself is controlled by drifter lifetimes) + endtime = fieldset.time_origin.fulltime(fieldset.U.grid.time_full[-1]) # execute simulation drifter_particleset.execute( [AdvectionRK4, _sample_temperature, _check_lifetime], - endtime=actual_endtime, + endtime=endtime, dt=DT, output_file=out_file, verbose_progress=self.verbose_progress, ) - - # if there are more particles left than the number of drifters with an indefinite endtime, warn the user - if len(drifter_particleset.particledata) > len( - [d for d in measurements if d.lifetime is None] - ): - print( - "WARN: Some drifters had a life time beyond the end time of the fieldset or the requested end time." 
- ) diff --git a/src/virtualship/instruments/ship_underwater_st.py b/src/virtualship/instruments/ship_underwater_st.py index 1e66ba50..8b7ef96d 100644 --- a/src/virtualship/instruments/ship_underwater_st.py +++ b/src/virtualship/instruments/ship_underwater_st.py @@ -62,6 +62,9 @@ def __init__(self, expedition, from_data): "latlon": 0.25, # [degrees] "time": 0.0, # [days] } + limit_spec = { + "spatial": True + } # spatial limits; lat/lon constrained to waypoint locations + buffer super().__init__( expedition, @@ -70,7 +73,7 @@ def __init__(self, expedition, from_data): allow_time_extrapolation=True, verbose_progress=False, spacetime_buffer_size=spacetime_buffer_size, - limit_spec=None, + limit_spec=limit_spec, from_data=from_data, ) diff --git a/src/virtualship/instruments/xbt.py b/src/virtualship/instruments/xbt.py index f0f5d130..2412306f 100644 --- a/src/virtualship/instruments/xbt.py +++ b/src/virtualship/instruments/xbt.py @@ -80,6 +80,10 @@ class XBTInstrument(Instrument): def __init__(self, expedition, from_data): """Initialize XBTInstrument.""" variables = {"T": "thetao"} + limit_spec = { + "spatial": True + } # spatial limits; lat/lon constrained to waypoint locations + buffer + super().__init__( expedition, variables, @@ -87,7 +91,7 @@ def __init__(self, expedition, from_data): allow_time_extrapolation=True, verbose_progress=False, spacetime_buffer_size=None, - limit_spec=None, + limit_spec=limit_spec, from_data=from_data, ) diff --git a/src/virtualship/models/__init__.py b/src/virtualship/models/__init__.py index 5eaabb85..d61c1719 100644 --- a/src/virtualship/models/__init__.py +++ b/src/virtualship/models/__init__.py @@ -15,11 +15,6 @@ XBTConfig, ) from .location import Location -from .space_time_region import ( - SpaceTimeRegion, - SpatialRange, - TimeRange, -) from .spacetime import ( Spacetime, ) @@ -36,9 +31,6 @@ "ShipUnderwaterSTConfig", "DrifterConfig", "XBTConfig", - "SpatialRange", - "TimeRange", - "SpaceTimeRegion", "Spacetime", "Expedition", "InstrumentsConfig", diff --git a/src/virtualship/models/expedition.py b/src/virtualship/models/expedition.py index e6e80102..b8f65558 100644 --- a/src/virtualship/models/expedition.py +++ b/src/virtualship/models/expedition.py @@ -11,10 +11,13 @@ from virtualship.errors import InstrumentsConfigError, ScheduleError from virtualship.instruments.types import InstrumentType -from virtualship.utils import _get_bathy_data, _validate_numeric_mins_to_timedelta +from virtualship.utils import ( + _get_bathy_data, + _get_waypoint_latlons, + _validate_numeric_to_timedelta, +) from .location import Location -from .space_time_region import SpaceTimeRegion projection: pyproj.Geod = pyproj.Geod(ellps="WGS84") @@ -77,7 +80,6 @@ class Schedule(pydantic.BaseModel): """Schedule of the virtual ship.""" waypoints: list[Waypoint] - space_time_region: SpaceTimeRegion | None = None model_config = pydantic.ConfigDict(extra="forbid") @@ -86,7 +88,6 @@ def verify( ship_speed: float, ignore_land_test: bool = False, *, - check_space_time_region: bool = False, from_data: Path | None = None, ) -> None: """ @@ -101,11 +102,6 @@ def verify( """ print("\nVerifying route... ") - if check_space_time_region and self.space_time_region is None: - raise ScheduleError( - "space_time_region not found in schedule, please define it to proceed." 
- ) - if len(self.waypoints) == 0: raise ScheduleError("At least one waypoint must be provided.") @@ -128,9 +124,12 @@ def verify( land_waypoints = [] if not ignore_land_test: try: + wp_lats, wp_lons = _get_waypoint_latlons(self.waypoints) bathymetry_field = _get_bathy_data( - self.space_time_region, - latlon_buffer=None, + min(wp_lats), + max(wp_lats), + min(wp_lons), + max(wp_lons), from_data=from_data, ).bathymetry except Exception as e: @@ -150,7 +149,7 @@ def verify( land_waypoints.append((wp_i, wp)) except Exception as e: raise ScheduleError( - f"Waypoint #{wp_i + 1} at location {wp.location} could not be evaluated against bathymetry data. There may be a problem with the waypoint location being outside of the space_time_region or with the bathymetry data itself.\n\n Original error: {e}" + f"Waypoint #{wp_i + 1} at location {wp.location} could not be evaluated against bathymetry data. \n\n Original error: {e}" ) from e if len(land_waypoints) > 0: @@ -214,6 +213,11 @@ class ArgoFloatConfig(pydantic.BaseModel): vertical_speed_meter_per_second: float = pydantic.Field(lt=0.0) cycle_days: float = pydantic.Field(gt=0.0) drift_days: float = pydantic.Field(gt=0.0) + lifetime: timedelta = pydantic.Field( + serialization_alias="lifetime_days", + validation_alias="lifetime_days", + gt=timedelta(), + ) stationkeeping_time: timedelta = pydantic.Field( serialization_alias="stationkeeping_time_minutes", @@ -221,13 +225,21 @@ class ArgoFloatConfig(pydantic.BaseModel): gt=timedelta(), ) + @pydantic.field_serializer("lifetime") + def _serialize_lifetime(self, value: timedelta, _info): + return value.total_seconds() / 86400.0 # [days] + + @pydantic.field_validator("lifetime", mode="before") + def _validate_lifetime(cls, value: int | float | timedelta) -> timedelta: + return _validate_numeric_to_timedelta(value, "days") + @pydantic.field_serializer("stationkeeping_time") def _serialize_stationkeeping_time(self, value: timedelta, _info): return value.total_seconds() / 60.0 @pydantic.field_validator("stationkeeping_time", mode="before") def _validate_stationkeeping_time(cls, value: int | float | timedelta) -> timedelta: - return _validate_numeric_mins_to_timedelta(value) + return _validate_numeric_to_timedelta(value, "minutes") model_config = pydantic.ConfigDict(populate_by_name=True) @@ -251,7 +263,7 @@ def _serialize_period(self, value: timedelta, _info): @pydantic.field_validator("period", mode="before") def _validate_period(cls, value: int | float | timedelta) -> timedelta: - return _validate_numeric_mins_to_timedelta(value) + return _validate_numeric_to_timedelta(value, "minutes") class CTDConfig(pydantic.BaseModel): @@ -273,7 +285,7 @@ def _serialize_stationkeeping_time(self, value: timedelta, _info): @pydantic.field_validator("stationkeeping_time", mode="before") def _validate_stationkeeping_time(cls, value: int | float | timedelta) -> timedelta: - return _validate_numeric_mins_to_timedelta(value) + return _validate_numeric_to_timedelta(value, "minutes") class CTD_BGCConfig(pydantic.BaseModel): @@ -295,7 +307,7 @@ def _serialize_stationkeeping_time(self, value: timedelta, _info): @pydantic.field_validator("stationkeeping_time", mode="before") def _validate_stationkeeping_time(cls, value: int | float | timedelta) -> timedelta: - return _validate_numeric_mins_to_timedelta(value) + return _validate_numeric_to_timedelta(value, "minutes") class ShipUnderwaterSTConfig(pydantic.BaseModel): @@ -315,7 +327,7 @@ def _serialize_period(self, value: timedelta, _info): @pydantic.field_validator("period", 
mode="before") def _validate_period(cls, value: int | float | timedelta) -> timedelta: - return _validate_numeric_mins_to_timedelta(value) + return _validate_numeric_to_timedelta(value, "minutes") class DrifterConfig(pydantic.BaseModel): @@ -323,8 +335,8 @@ class DrifterConfig(pydantic.BaseModel): depth_meter: float = pydantic.Field(le=0.0) lifetime: timedelta = pydantic.Field( - serialization_alias="lifetime_minutes", - validation_alias="lifetime_minutes", + serialization_alias="lifetime_days", + validation_alias="lifetime_days", gt=timedelta(), ) stationkeeping_time: timedelta = pydantic.Field( @@ -337,11 +349,11 @@ class DrifterConfig(pydantic.BaseModel): @pydantic.field_serializer("lifetime") def _serialize_lifetime(self, value: timedelta, _info): - return value.total_seconds() / 60.0 + return value.total_seconds() / 86400.0 # [days] @pydantic.field_validator("lifetime", mode="before") def _validate_lifetime(cls, value: int | float | timedelta) -> timedelta: - return _validate_numeric_mins_to_timedelta(value) + return _validate_numeric_to_timedelta(value, "days") @pydantic.field_serializer("stationkeeping_time") def _serialize_stationkeeping_time(self, value: timedelta, _info): @@ -349,7 +361,7 @@ def _serialize_stationkeeping_time(self, value: timedelta, _info): @pydantic.field_validator("stationkeeping_time", mode="before") def _validate_stationkeeping_time(cls, value: int | float | timedelta) -> timedelta: - return _validate_numeric_mins_to_timedelta(value) + return _validate_numeric_to_timedelta(value, "minutes") class XBTConfig(pydantic.BaseModel): diff --git a/src/virtualship/models/space_time_region.py b/src/virtualship/models/space_time_region.py deleted file mode 100644 index 48ad5699..00000000 --- a/src/virtualship/models/space_time_region.py +++ /dev/null @@ -1,64 +0,0 @@ -"""SpaceTimeRegion class.""" - -from datetime import datetime -from typing import Annotated - -from pydantic import BaseModel, Field, model_validator -from typing_extensions import Self - -Longitude = Annotated[float, Field(..., ge=-180, le=180)] -Latitude = Annotated[float, Field(..., ge=-90, le=90)] -Depth = float # TODO: insert a minimum depth here? e.g., `Annotated[float, Field(..., ge=0)]` -# TODO: is_valid_depth in validator_utils.py will alse need to be updated if this TODO is implemented - - -class SpatialRange(BaseModel): - """Defines geographic boundaries.""" - - minimum_longitude: Longitude - maximum_longitude: Longitude - minimum_latitude: Latitude - maximum_latitude: Latitude - minimum_depth: Depth | None = None - maximum_depth: Depth | None = None - - @model_validator(mode="after") - def _check_lon_lat_domain(self) -> Self: - if not self.minimum_longitude < self.maximum_longitude: - raise ValueError("minimum_longitude must be less than maximum_longitude") - if not self.minimum_latitude < self.maximum_latitude: - raise ValueError("minimum_latitude must be less than maximum_latitude") - - if sum([self.minimum_depth is None, self.maximum_depth is None]) == 1: - raise ValueError("Both minimum_depth and maximum_depth must be provided.") - - if self.minimum_depth is None: - return self - - if not self.minimum_depth < self.maximum_depth: - raise ValueError("minimum_depth must be less than maximum_depth") - return self - - -class TimeRange(BaseModel): - """Defines the temporal boundaries for a space-time region.""" - - #! 
TODO: Remove the `| None` for `start_time` and `end_time`, and have the MFP functionality not use pydantic (with testing to avoid codebase drift) - start_time: datetime | None = None - end_time: datetime | None = None - - @model_validator(mode="after") - def _check_time_range(self) -> Self: - if ( - self.start_time and self.end_time - ): #! TODO: remove this check once `start_time` and `end_time` are required - if not self.start_time < self.end_time: - raise ValueError("start_time must be before end_time") - return self - - -class SpaceTimeRegion(BaseModel): - """An space-time region with spatial and temporal boundaries.""" - - spatial_range: SpatialRange - time_range: TimeRange diff --git a/src/virtualship/static/expedition.yaml b/src/virtualship/static/expedition.yaml index 4c7394a3..388961c0 100644 --- a/src/virtualship/static/expedition.yaml +++ b/src/virtualship/static/expedition.yaml @@ -1,15 +1,4 @@ schedule: - space_time_region: - spatial_range: - minimum_longitude: -5 - maximum_longitude: 5 - minimum_latitude: -5 - maximum_latitude: 5 - minimum_depth: 0 - maximum_depth: 2000 - time_range: - start_time: 1998-01-01 00:00:00 - end_time: 1998-02-01 00:00:00 waypoints: - instrument: - CTD @@ -54,6 +43,7 @@ instruments_config: min_depth_meter: 0.0 vertical_speed_meter_per_second: -0.1 stationkeeping_time_minutes: 20.0 + lifetime_days: 63.0 ctd_config: max_depth_meter: -2000.0 min_depth_meter: -11.0 @@ -64,7 +54,7 @@ instruments_config: stationkeeping_time_minutes: 50.0 drifter_config: depth_meter: -1.0 - lifetime_minutes: 60480.0 + lifetime_days: 42.0 stationkeeping_time_minutes: 20.0 xbt_config: max_depth_meter: -285.0 diff --git a/src/virtualship/utils.py b/src/virtualship/utils.py index 251bc35f..c8493d7f 100644 --- a/src/virtualship/utils.py +++ b/src/virtualship/utils.py @@ -144,9 +144,6 @@ def mfp_to_yaml(coordinates_file_path: str, yaml_output_path: str): # noqa: D41 InstrumentsConfig, Location, Schedule, - SpaceTimeRegion, - SpatialRange, - TimeRange, Waypoint, ) @@ -155,30 +152,6 @@ def mfp_to_yaml(coordinates_file_path: str, yaml_output_path: str): # noqa: D41 coordinates_data = validate_coordinates(coordinates_data) - # maximum depth (in meters), buffer (in degrees) for each instrument - instrument_max_depths = { - "XBT": 2000, - "CTD": 5000, - "CTD_BGC": 5000, - "DRIFTER": 1, - "ARGO_FLOAT": 2000, - } - - spatial_range = SpatialRange( - minimum_longitude=coordinates_data["Longitude"].min(), - maximum_longitude=coordinates_data["Longitude"].max(), - minimum_latitude=coordinates_data["Latitude"].min(), - maximum_latitude=coordinates_data["Latitude"].max(), - minimum_depth=0, - maximum_depth=max(instrument_max_depths.values()), - ) - - # Create space-time region object - space_time_region = SpaceTimeRegion( - spatial_range=spatial_range, - time_range=TimeRange(), - ) - # Generate waypoints waypoints = [] for _, row in coordinates_data.iterrows(): @@ -192,7 +165,6 @@ def mfp_to_yaml(coordinates_file_path: str, yaml_output_path: str): # noqa: D41 # Create Schedule object schedule = Schedule( waypoints=waypoints, - space_time_region=space_time_region, ) # extract instruments config from static @@ -214,11 +186,16 @@ def mfp_to_yaml(coordinates_file_path: str, yaml_output_path: str): # noqa: D41 expedition.to_yaml(yaml_output_path) -def _validate_numeric_mins_to_timedelta(value: int | float | timedelta) -> timedelta: - """Convert minutes to timedelta when reading.""" +def _validate_numeric_to_timedelta( + value: int | float | timedelta, unit: str +) -> timedelta: + """Convert to 
timedelta when reading.""" if isinstance(value, timedelta): return value - return timedelta(minutes=value) + if unit == "minutes": + return timedelta(minutes=float(value)) + elif unit == "days": + return timedelta(days=float(value)) def _get_expedition(expedition_dir: Path) -> Expedition: @@ -329,6 +306,8 @@ def add_dummy_UV(fieldset: FieldSet): COPERNICUSMARINE_PHYS_VARIABLES = ["uo", "vo", "so", "thetao"] COPERNICUSMARINE_BGC_VARIABLES = ["o2", "chl", "no3", "po4", "ph", "phyc", "nppv"] +BATHYMETRY_ID = "cmems_mod_glo_phy_my_0.083deg_static" + def _select_product_id( physical: bool, @@ -416,7 +395,11 @@ def _start_end_in_product_timerange( def _get_bathy_data( - space_time_region, latlon_buffer: float | None = None, from_data: Path | None = None + min_lat: float, + max_lat: float, + min_lon: float, + max_lon: float, + from_data: Path | None = None, ) -> FieldSet: """Bathymetry data from local or 'streamed' directly from Copernicus Marine.""" if from_data is not None: # load from local data @@ -438,18 +421,8 @@ def _get_bathy_data( else: # stream via Copernicus Marine Service ds_bathymetry = copernicusmarine.open_dataset( - dataset_id="cmems_mod_glo_phy_my_0.083deg_static", - minimum_longitude=space_time_region.spatial_range.minimum_longitude - - (latlon_buffer if latlon_buffer is not None else 0), - maximum_longitude=space_time_region.spatial_range.maximum_longitude - + (latlon_buffer if latlon_buffer is not None else 0), - minimum_latitude=space_time_region.spatial_range.minimum_latitude - - (latlon_buffer if latlon_buffer is not None else 0), - maximum_latitude=space_time_region.spatial_range.maximum_latitude - + (latlon_buffer if latlon_buffer is not None else 0), + dataset_id=BATHYMETRY_ID, variables=["deptho"], - start_datetime=space_time_region.time_range.start_time, - end_datetime=space_time_region.time_range.end_time, coordinates_selection_method="outside", ) bathymetry_variables = {"bathymetry": "deptho"} @@ -468,6 +441,7 @@ def expedition_cost(schedule_results: ScheduleOk, time_past: timedelta) -> float :param time_past: Time the expedition took. :returns: The calculated cost of the expedition in US$. """ + # TODO: refactor to instrument sub-classes attributes...? 
SHIP_COST_PER_DAY = 30000 DRIFTER_DEPLOY_COST = 2500 ARGO_DEPLOY_COST = 15000 @@ -566,3 +540,12 @@ def _random_noise(scale: float = 0.01, limit: float = 0.03) -> float: """Generate a small random noise value for drifter seeding locations.""" value = np.random.normal(loc=0.0, scale=scale) return np.clip(value, -limit, limit) # ensure noise is within limits + + +def _get_waypoint_latlons(waypoints): + """Extract latitudes and longitudes from waypoints.""" + wp_lats, wp_lons = zip( + *[(wp.location.latitude, wp.location.longitude) for wp in waypoints], + strict=True, + ) + return wp_lats, wp_lons diff --git a/tests/expedition/expedition_dir/expedition.yaml b/tests/expedition/expedition_dir/expedition.yaml index cd3f532a..65e0b540 100644 --- a/tests/expedition/expedition_dir/expedition.yaml +++ b/tests/expedition/expedition_dir/expedition.yaml @@ -30,6 +30,7 @@ instruments_config: min_depth_meter: 0.0 vertical_speed_meter_per_second: -0.1 stationkeeping_time_minutes: 20.0 + lifetime_days: 63.0 ctd_config: max_depth_meter: -2000.0 min_depth_meter: -11.0 @@ -40,7 +41,7 @@ instruments_config: stationkeeping_time_minutes: 50.0 drifter_config: depth_meter: -1.0 - lifetime_minutes: 40320.0 + lifetime_days: 28.0 stationkeeping_time_minutes: 20.0 ship_underwater_st_config: period_minutes: 5.0 diff --git a/tests/expedition/test_expedition.py b/tests/expedition/test_expedition.py index 78fff2c2..90027e8e 100644 --- a/tests/expedition/test_expedition.py +++ b/tests/expedition/test_expedition.py @@ -15,11 +15,6 @@ Schedule, Waypoint, ) -from virtualship.models.space_time_region import ( - SpaceTimeRegion, - SpatialRange, - TimeRange, -) from virtualship.utils import EXPEDITION, _get_expedition, get_example_expedition projection = pyproj.Geod(ellps="WGS84") @@ -136,20 +131,7 @@ def test_verify_on_land(): ), # NaN cell ] - spatial_range = SpatialRange( - minimum_latitude=min(wp.location.lat for wp in waypoints), - maximum_latitude=max(wp.location.lat for wp in waypoints), - minimum_longitude=min(wp.location.lon for wp in waypoints), - maximum_longitude=max(wp.location.lon for wp in waypoints), - ) - time_range = TimeRange( - start_time=min(wp.time for wp in waypoints if wp.time is not None), - end_time=max(wp.time for wp in waypoints if wp.time is not None), - ) - space_time_region = SpaceTimeRegion( - spatial_range=spatial_range, time_range=time_range - ) - schedule = Schedule(waypoints=waypoints, space_time_region=space_time_region) + schedule = Schedule(waypoints=waypoints) ship_speed_knots = _get_expedition(expedition_dir).ship_config.ship_speed_knots with patch( @@ -168,11 +150,10 @@ def test_verify_on_land(): @pytest.mark.parametrize( - "schedule,check_space_time_region,error,match", + "schedule,error,match", [ pytest.param( Schedule(waypoints=[]), - False, ScheduleError, "At least one waypoint must be provided.", id="NoWaypoints", @@ -186,7 +167,6 @@ def test_verify_on_land(): ), ] ), - False, ScheduleError, "First waypoint must have a specified time.", id="FirstWaypointHasTime", @@ -203,7 +183,6 @@ def test_verify_on_land(): ), ] ), - False, ScheduleError, "Waypoint\\(s\\) : each waypoint should be timed after all previous waypoints", id="SequentialWaypoints", @@ -219,39 +198,19 @@ def test_verify_on_land(): ), ] ), - False, ScheduleError, "Waypoint planning is not valid: would arrive too late at waypoint number 2...", id="NotEnoughTime", ), - pytest.param( - Schedule( - waypoints=[ - Waypoint( - location=Location(0, 0), time=datetime(2022, 1, 1, 1, 0, 0) - ), - Waypoint( - location=Location(1, 0), 
time=datetime(2022, 1, 2, 1, 1, 0) - ), - ] - ), - True, - ScheduleError, - "space_time_region not found in schedule, please define it to proceed.", - id="NoSpaceTimeRegion", - ), ], ) -def test_verify_schedule_errors( - schedule: Schedule, check_space_time_region: bool, error, match -) -> None: +def test_verify_schedule_errors(schedule: Schedule, error, match) -> None: expedition = _get_expedition(expedition_dir) with pytest.raises(error, match=match): schedule.verify( expedition.ship_config.ship_speed_knots, ignore_land_test=True, - check_space_time_region=check_space_time_region, ) diff --git a/tests/instruments/test_adcp.py b/tests/instruments/test_adcp.py index a2a5418a..0a88b206 100644 --- a/tests/instruments/test_adcp.py +++ b/tests/instruments/test_adcp.py @@ -4,10 +4,11 @@ import numpy as np import xarray as xr -from parcels import FieldSet +from parcels import FieldSet from virtualship.instruments.adcp import ADCPInstrument -from virtualship.models import Location, Spacetime +from virtualship.instruments.types import InstrumentType +from virtualship.models import Location, Spacetime, Waypoint def test_simulate_adcp(tmpdir) -> None: @@ -77,15 +78,24 @@ def test_simulate_adcp(tmpdir) -> None: }, ) - # dummy expedition and directory for ADCPInstrument + # dummy expedition for ADCPInstrument class DummyExpedition: + class schedule: + # ruff: noqa + waypoints = [ + Waypoint( + location=Location(1, 2), + time=base_time, + instrument=InstrumentType.ADCP, + ), + ] + class instruments_config: class adcp_config: max_depth_meter = MAX_DEPTH num_bins = NUM_BINS expedition = DummyExpedition() - from_data = None adcp_instrument = ADCPInstrument(expedition, from_data) diff --git a/tests/instruments/test_argo_float.py b/tests/instruments/test_argo_float.py index 22c9727f..66331d64 100644 --- a/tests/instruments/test_argo_float.py +++ b/tests/instruments/test_argo_float.py @@ -8,6 +8,7 @@ from virtualship.instruments.argo_float import ArgoFloat, ArgoFloatInstrument from virtualship.models import Location, Spacetime +from virtualship.models.expedition import Waypoint def test_simulate_argo_floats(tmpdir) -> None: @@ -19,6 +20,7 @@ def test_simulate_argo_floats(tmpdir) -> None: VERTICAL_SPEED = -0.10 CYCLE_DAYS = 10 DRIFT_DAYS = 9 + LIFETIME = timedelta(days=1) CONST_TEMPERATURE = 1.0 # constant temperature in fieldset CONST_SALINITY = 1.0 # constant salinity in fieldset @@ -63,9 +65,20 @@ def test_simulate_argo_floats(tmpdir) -> None: ) ] - # dummy expedition and directory for ArgoFloatInstrument + # dummy expedition for ArgoFloatInstrument class DummyExpedition: - pass + class schedule: + # ruff: noqa + waypoints = [ + Waypoint( + location=Location(1, 2), + time=base_time, + ), + ] + + class instruments_config: + class argo_float_config: + lifetime = LIFETIME expedition = DummyExpedition() from_data = None diff --git a/tests/instruments/test_base.py b/tests/instruments/test_base.py index 29d319ba..fea43f02 100644 --- a/tests/instruments/test_base.py +++ b/tests/instruments/test_base.py @@ -32,8 +32,14 @@ def test_load_input_data(mock_copernicusmarine, mock_select_product_id, mock_Fie mock_fieldset.gridset.grids = [MagicMock(negate_depth=MagicMock())] mock_fieldset.__getitem__.side_effect = lambda k: MagicMock() mock_copernicusmarine.open_dataset.return_value = MagicMock() + # Create a mock waypoint with latitude and longitude + mock_waypoint = MagicMock() + mock_waypoint.location.latitude = 1.0 + mock_waypoint.location.longitude = 2.0 + mock_schedule = MagicMock() + mock_schedule.waypoints = 
[mock_waypoint] dummy = DummyInstrument( - expedition=MagicMock(schedule=MagicMock(space_time_region=MagicMock())), + expedition=MagicMock(schedule=mock_schedule), variables={"A": "a"}, add_bathymetry=False, allow_time_extrapolation=False, @@ -47,8 +53,13 @@ def test_load_input_data(mock_copernicusmarine, mock_select_product_id, mock_Fie def test_execute_calls_simulate(monkeypatch): + mock_waypoint = MagicMock() + mock_waypoint.location.latitude = 1.0 + mock_waypoint.location.longitude = 2.0 + mock_schedule = MagicMock() + mock_schedule.waypoints = [mock_waypoint] dummy = DummyInstrument( - expedition=MagicMock(schedule=MagicMock(space_time_region=MagicMock())), + expedition=MagicMock(schedule=mock_schedule), variables={"A": "a"}, add_bathymetry=False, allow_time_extrapolation=False, @@ -61,8 +72,13 @@ def test_execute_calls_simulate(monkeypatch): def test_get_spec_value_buffer_and_limit(): + mock_waypoint = MagicMock() + mock_waypoint.location.latitude = 1.0 + mock_waypoint.location.longitude = 2.0 + mock_schedule = MagicMock() + mock_schedule.waypoints = [mock_waypoint] dummy = DummyInstrument( - expedition=MagicMock(schedule=MagicMock(space_time_region=MagicMock())), + expedition=MagicMock(schedule=mock_schedule), variables={"A": "a"}, add_bathymetry=False, allow_time_extrapolation=False, @@ -77,8 +93,13 @@ def test_get_spec_value_buffer_and_limit(): def test_generate_fieldset_combines_fields(monkeypatch): + mock_waypoint = MagicMock() + mock_waypoint.location.latitude = 1.0 + mock_waypoint.location.longitude = 2.0 + mock_schedule = MagicMock() + mock_schedule.waypoints = [mock_waypoint] dummy = DummyInstrument( - expedition=MagicMock(schedule=MagicMock(space_time_region=MagicMock())), + expedition=MagicMock(schedule=mock_schedule), variables={"A": "a", "B": "b"}, add_bathymetry=False, allow_time_extrapolation=False, @@ -87,7 +108,9 @@ def test_generate_fieldset_combines_fields(monkeypatch): ) dummy.from_data = None - monkeypatch.setattr(dummy, "_get_copernicus_ds", lambda physical, var: MagicMock()) + monkeypatch.setattr( + dummy, "_get_copernicus_ds", lambda *args, **kwargs: MagicMock() + ) fs_A = MagicMock() fs_B = MagicMock() @@ -102,8 +125,13 @@ def test_generate_fieldset_combines_fields(monkeypatch): def test_load_input_data_error(monkeypatch): + mock_waypoint = MagicMock() + mock_waypoint.location.latitude = 1.0 + mock_waypoint.location.longitude = 2.0 + mock_schedule = MagicMock() + mock_schedule.waypoints = [mock_waypoint] dummy = DummyInstrument( - expedition=MagicMock(schedule=MagicMock(space_time_region=MagicMock())), + expedition=MagicMock(schedule=mock_schedule), variables={"A": "a"}, add_bathymetry=False, allow_time_extrapolation=False, diff --git a/tests/instruments/test_ctd.py b/tests/instruments/test_ctd.py index fff5fc4f..954d0b78 100644 --- a/tests/instruments/test_ctd.py +++ b/tests/instruments/test_ctd.py @@ -8,10 +8,11 @@ import numpy as np import xarray as xr -from parcels import Field, FieldSet +from parcels import Field, FieldSet from virtualship.instruments.ctd import CTD, CTDInstrument from virtualship.models import Location, Spacetime +from virtualship.models.expedition import Waypoint def test_simulate_ctds(tmpdir) -> None: @@ -101,35 +102,18 @@ def test_simulate_ctds(tmpdir) -> None: ) fieldset.add_field(Field("bathymetry", [-1000], lon=0, lat=0)) - # dummy expedition and directory for CTDInstrument + # dummy expedition for CTDInstrument class DummyExpedition: class schedule: - class space_time_region: - time_range = type( - "TimeRange", - (), - { - 
"start_time": fieldset.T.grid.time_origin.fulltime( - fieldset.T.grid.time_full[0] - ), - "end_time": fieldset.T.grid.time_origin.fulltime( - fieldset.T.grid.time_full[-1] - ), - }, - )() - spatial_range = type( - "SpatialRange", - (), - { - "minimum_longitude": 0, - "maximum_longitude": 1, - "minimum_latitude": 0, - "maximum_latitude": 1, - }, - )() + # ruff: noqa + waypoints = [ + Waypoint( + location=Location(1, 2), + time=base_time, + ), + ] expedition = DummyExpedition() - from_data = None ctd_instrument = CTDInstrument(expedition, from_data) diff --git a/tests/instruments/test_ctd_bgc.py b/tests/instruments/test_ctd_bgc.py index 00f30077..39fa6c1f 100644 --- a/tests/instruments/test_ctd_bgc.py +++ b/tests/instruments/test_ctd_bgc.py @@ -8,10 +8,11 @@ import numpy as np import xarray as xr -from parcels import Field, FieldSet +from parcels import Field, FieldSet from virtualship.instruments.ctd_bgc import CTD_BGC, CTD_BGCInstrument from virtualship.models import Location, Spacetime +from virtualship.models.expedition import Waypoint def test_simulate_ctd_bgcs(tmpdir) -> None: @@ -162,9 +163,16 @@ def test_simulate_ctd_bgcs(tmpdir) -> None: ) fieldset.add_field(Field("bathymetry", [-1000], lon=0, lat=0)) - # dummy expedition and directory for CTD_BGCInstrument + # dummy expedition for CTD_BGCInstrument class DummyExpedition: - pass + class schedule: + # ruff: noqa + waypoints = [ + Waypoint( + location=Location(1, 2), + time=base_time, + ), + ] expedition = DummyExpedition() from_data = None diff --git a/tests/instruments/test_drifter.py b/tests/instruments/test_drifter.py index 03d04ea8..51f78883 100644 --- a/tests/instruments/test_drifter.py +++ b/tests/instruments/test_drifter.py @@ -4,10 +4,11 @@ import numpy as np import xarray as xr -from parcels import FieldSet +from parcels import FieldSet from virtualship.instruments.drifter import Drifter, DrifterInstrument from virtualship.models import Location, Spacetime +from virtualship.models.expedition import Waypoint def test_simulate_drifters(tmpdir) -> None: @@ -16,6 +17,8 @@ def test_simulate_drifters(tmpdir) -> None: CONST_TEMPERATURE = 1.0 # constant temperature in fieldset + LIFETIME = datetime.timedelta(days=1) + v = np.full((2, 2, 2), 1.0) u = np.full((2, 2, 2), 1.0) t = np.full((2, 2, 2), CONST_TEMPERATURE) @@ -52,12 +55,22 @@ def test_simulate_drifters(tmpdir) -> None: ), ] - # dummy expedition and directory for DrifterInstrument + # dummy expedition for DrifterInstrument class DummyExpedition: - pass + class schedule: + # ruff: noqa + waypoints = [ + Waypoint( + location=Location(1, 2), + time=base_time, + ), + ] + + class instruments_config: + class drifter_config: + lifetime = LIFETIME expedition = DummyExpedition() - from_data = None drifter_instrument = DrifterInstrument(expedition, from_data) diff --git a/tests/instruments/test_ship_underwater_st.py b/tests/instruments/test_ship_underwater_st.py index e7ca18d1..3f1aae65 100644 --- a/tests/instruments/test_ship_underwater_st.py +++ b/tests/instruments/test_ship_underwater_st.py @@ -4,10 +4,11 @@ import numpy as np import xarray as xr -from parcels import FieldSet +from parcels import FieldSet from virtualship.instruments.ship_underwater_st import Underwater_STInstrument from virtualship.models import Location, Spacetime +from virtualship.models.expedition import Waypoint def test_simulate_ship_underwater_st(tmpdir) -> None: @@ -67,12 +68,18 @@ def test_simulate_ship_underwater_st(tmpdir) -> None: }, ) - # dummy expedition and directory for Underwater_STInstrument + # 
dummy expedition for Underwater_STInstrument class DummyExpedition: - pass + class schedule: + # ruff: noqa + waypoints = [ + Waypoint( + location=Location(1, 2), + time=base_time, + ), + ] expedition = DummyExpedition() - from_data = None st_instrument = Underwater_STInstrument(expedition, from_data) diff --git a/tests/instruments/test_xbt.py b/tests/instruments/test_xbt.py index d218025a..c6a36631 100644 --- a/tests/instruments/test_xbt.py +++ b/tests/instruments/test_xbt.py @@ -8,10 +8,11 @@ import numpy as np import xarray as xr -from parcels import Field, FieldSet +from parcels import Field, FieldSet from virtualship.instruments.xbt import XBT, XBTInstrument from virtualship.models import Location, Spacetime +from virtualship.models.expedition import Waypoint def test_simulate_xbts(tmpdir) -> None: @@ -95,12 +96,18 @@ def test_simulate_xbts(tmpdir) -> None: ) fieldset.add_field(Field("bathymetry", [-1000], lon=0, lat=0)) - # dummy expedition and directory for XBTInstrument + # dummy expedition for XBTInstrument class DummyExpedition: - pass + class schedule: + # ruff: noqa + waypoints = [ + Waypoint( + location=Location(1, 2), + time=base_time, + ), + ] expedition = DummyExpedition() - from_data = None xbt_instrument = XBTInstrument(expedition, from_data) diff --git a/tests/test_utils.py b/tests/test_utils.py index 8bd2338e..deca66d5 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1,3 +1,4 @@ +import datetime from pathlib import Path import numpy as np @@ -24,37 +25,6 @@ def expedition(tmp_file): return Expedition.from_yaml(tmp_file) -@pytest.fixture -def dummy_spatial_range(): - class DummySpatialRange: - minimum_longitude = 0 - maximum_longitude = 1 - minimum_latitude = 0 - maximum_latitude = 1 - minimum_depth = 0 - maximum_depth = 4 - - return DummySpatialRange() - - -@pytest.fixture -def dummy_time_range(): - class DummyTimeRange: - start_time = "2020-01-01" - end_time = "2020-01-02" - - return DummyTimeRange() - - -@pytest.fixture -def dummy_space_time_region(dummy_spatial_range, dummy_time_range): - class DummySpaceTimeRegion: - spatial_range = dummy_spatial_range - time_range = dummy_time_range - - return DummySpaceTimeRegion() - - @pytest.fixture def dummy_instrument(): class DummyInstrument: @@ -132,11 +102,13 @@ def test_add_dummy_UV_adds_fields(): @pytest.mark.usefixtures("copernicus_no_download") def test_select_product_id(expedition): - """Should return the physical reanalysis product id via the timings prescribed in the static schedule.yaml file.""" + """Should return the physical reanalysis product id via the timings prescribed.""" result = _select_product_id( physical=True, - schedule_start=expedition.schedule.space_time_region.time_range.start_time, - schedule_end=expedition.schedule.space_time_region.time_range.end_time, + schedule_start=datetime.datetime( + 1995, 6, 1, 0, 0, 0 + ), # known to be in reanalysis range + schedule_end=datetime.datetime(1995, 6, 30, 0, 0, 0), username="test", password="test", ) @@ -145,17 +117,17 @@ def test_select_product_id(expedition): @pytest.mark.usefixtures("copernicus_no_download") def test_start_end_in_product_timerange(expedition): - """Should return True for valid range ass determined by the static schedule.yaml file.""" + """Should return True for valid range as determined by the static schedule.yaml file.""" assert _start_end_in_product_timerange( selected_id="cmems_mod_glo_phy_my_0.083deg_P1D-m", - schedule_start=expedition.schedule.space_time_region.time_range.start_time, - 
schedule_end=expedition.schedule.space_time_region.time_range.end_time, + schedule_start=datetime.datetime(1995, 6, 1, 0, 0, 0), + schedule_end=datetime.datetime(1995, 6, 30, 0, 0, 0), username="test", password="test", ) -def test_get_bathy_data_local(tmp_path, dummy_space_time_region): +def test_get_bathy_data_local(tmp_path): """Test that _get_bathy_data returns a FieldSet when given a local directory for --from-data.""" # dummy .nc file with 'deptho' variable data = np.array([[1, 2], [3, 4]]) @@ -173,13 +145,15 @@ def test_get_bathy_data_local(tmp_path, dummy_space_time_region): ds.to_netcdf(nc_path) # should return a FieldSet - fieldset = _get_bathy_data(dummy_space_time_region, from_data=tmp_path) + fieldset = _get_bathy_data( + min_lat=0.25, max_lat=0.75, min_lon=0.25, max_lon=0.75, from_data=tmp_path + ) assert isinstance(fieldset, FieldSet) assert hasattr(fieldset, "bathymetry") assert np.allclose(fieldset.bathymetry.data, data) -def test_get_bathy_data_copernicusmarine(monkeypatch, dummy_space_time_region): +def test_get_bathy_data_copernicusmarine(monkeypatch): """Test that _get_bathy_data calls copernicusmarine by default.""" def dummy_copernicusmarine(*args, **kwargs): @@ -190,7 +164,7 @@ def dummy_copernicusmarine(*args, **kwargs): ) try: - _get_bathy_data(dummy_space_time_region) + _get_bathy_data(min_lat=0.25, max_lat=0.75, min_lon=0.25, max_lon=0.75) except RuntimeError as e: assert "copernicusmarine called" in str(e)