diff --git a/flood_adapt/adapter/sfincs_adapter.py b/flood_adapt/adapter/sfincs_adapter.py index 2f5bdb87..e303fdd2 100644 --- a/flood_adapt/adapter/sfincs_adapter.py +++ b/flood_adapt/adapter/sfincs_adapter.py @@ -36,6 +36,7 @@ RainfallConstant, RainfallCSV, RainfallMeteo, + RainfallNetCDF, RainfallSynthetic, RainfallTrack, ) @@ -52,6 +53,7 @@ from flood_adapt.object_model.hazard.forcing.wind import ( WindConstant, WindMeteo, + WindNetCDF, WindSynthetic, WindTrack, ) @@ -339,6 +341,10 @@ def add_projection(self, projection: IProjection): ) ### GETTERS ### + def get_model_time(self) -> TimeModel: + t0, t1 = self._model.get_model_time() + return TimeModel(start_time=t0, end_time=t1) + def get_model_root(self) -> Path: return Path(self._model.root) @@ -909,8 +915,7 @@ def _add_forcing_wind( const_dir : float, optional direction of time-invariant wind forcing [deg], by default None """ - t0, t1 = self._model.get_model_time() - + time_frame = self.get_model_time() if isinstance(wind, WindConstant): # HydroMT function: set wind forcing from constant magnitude and direction self._model.setup_wind_forcing( @@ -919,7 +924,7 @@ def _add_forcing_wind( direction=wind.direction.value, ) elif isinstance(wind, WindSynthetic): - df = wind.to_dataframe(time_frame=TimeModel(start_time=t0, end_time=t1)) + df = wind.to_dataframe(time_frame=time_frame) df["mag"] *= us.UnitfulVelocity( value=1.0, units=Settings().unit_system.velocity ).convert(us.UnitTypesVelocity.mps) @@ -932,7 +937,7 @@ def _add_forcing_wind( timeseries=tmp_path, magnitude=None, direction=None ) elif isinstance(wind, WindMeteo): - ds = MeteoHandler().read(TimeModel(start_time=t0, end_time=t1)) + ds = MeteoHandler().read(time_frame) # data already in metric units so no conversion needed # HydroMT function: set wind forcing from grid @@ -942,6 +947,14 @@ def _add_forcing_wind( raise ValueError("No path to rainfall track file provided.") # data already in metric units so no conversion needed 
self._add_forcing_spw(wind.path) + elif isinstance(wind, WindNetCDF): + ds = wind.read() + # time slicing to time_frame not needed, hydromt-sfincs handles it + conversion = us.UnitfulVelocity(value=1.0, units=wind.unit).convert( + us.UnitTypesVelocity.mps + ) + ds *= conversion + self._model.setup_wind_forcing_from_grid(wind=ds) else: self.logger.warning( f"Unsupported wind forcing type: {wind.__class__.__name__}" @@ -958,8 +971,7 @@ def _add_forcing_rain(self, rainfall: IRainfall): const_intensity : float, optional time-invariant precipitation intensity [mm_hr], by default None """ - t0, t1 = self._model.get_model_time() - time_frame = TimeModel(start_time=t0, end_time=t1) + time_frame = self.get_model_time() if isinstance(rainfall, RainfallConstant): self._model.setup_precip_forcing( timeseries=None, @@ -984,13 +996,22 @@ def _add_forcing_rain(self, rainfall: IRainfall): self._model.setup_precip_forcing(timeseries=tmp_path) elif isinstance(rainfall, RainfallMeteo): ds = MeteoHandler().read(time_frame) - # data already in metric units so no conversion needed + # MeteoHandler always return metric so no conversion needed + ds["precip"] *= self._current_scenario.event.attrs.rainfall_multiplier self._model.setup_precip_forcing_from_grid(precip=ds, aggregate=False) elif isinstance(rainfall, RainfallTrack): if rainfall.path is None: raise ValueError("No path to rainfall track file provided.") # data already in metric units so no conversion needed self._add_forcing_spw(rainfall.path) + elif isinstance(rainfall, RainfallNetCDF): + ds = rainfall.read() + # time slicing to time_frame not needed, hydromt-sfincs handles it + conversion = us.UnitfulIntensity(value=1.0, units=rainfall.unit).convert( + us.UnitTypesIntensity.mm_hr + ) + ds *= self._current_scenario.event.attrs.rainfall_multiplier * conversion + self._model.setup_precip_forcing_from_grid(precip=ds, aggregate=False) else: self.logger.warning( f"Unsupported rainfall forcing type: {rainfall.__class__.__name__}" @@ 
-1015,8 +1036,7 @@ def _add_forcing_discharge(self, forcing: IDischarge): ) def _add_forcing_waterlevels(self, forcing: IWaterlevel): - t0, t1 = self._model.get_model_time() - time_frame = TimeModel(start_time=t0, end_time=t1) + time_frame = self.get_model_time() if isinstance(forcing, WaterlevelSynthetic): df_ts = forcing.to_dataframe(time_frame=time_frame) conversion = us.UnitfulLength( @@ -1222,8 +1242,8 @@ def _set_single_river_forcing(self, discharge: IDischarge): return self.logger.info(f"Setting discharge forcing for river: {discharge.river.name}") - t0, t1 = self._model.get_model_time() - time_frame = TimeModel(start_time=t0, end_time=t1) + + time_frame = self.get_model_time() model_rivers = self._read_river_locations() # Check that the river is defined in the model and that the coordinates match diff --git a/flood_adapt/dbs_classes/database.py b/flood_adapt/dbs_classes/database.py index f4de3729..44b5d2aa 100644 --- a/flood_adapt/dbs_classes/database.py +++ b/flood_adapt/dbs_classes/database.py @@ -7,11 +7,11 @@ import geopandas as gpd import numpy as np import pandas as pd +import xarray as xr from cht_cyclones.tropical_cyclone import TropicalCyclone from geopandas import GeoDataFrame from plotly.express import line from plotly.express.colors import sample_colorscale -from xarray import open_dataarray, open_dataset from flood_adapt.dbs_classes.dbs_benefit import DbsBenefit from flood_adapt.dbs_classes.dbs_event import DbsEvent @@ -484,17 +484,16 @@ def get_max_water_level( "Flooding", "max_water_level_map.nc", ) - map = open_dataarray(map_path) - - zsmax = map.to_numpy() - + with xr.open_dataarray(map_path) as map: + zsmax = map.to_numpy() else: file_path = self.scenarios.output_path.joinpath( scenario_name, "Flooding", f"RP_{return_period:04d}_maps.nc", ) - zsmax = open_dataset(file_path)["risk_map"][:, :].to_numpy().T + with xr.open_dataset(file_path) as ds: + zsmax = ds["risk_map"][:, :].to_numpy().T return zsmax def get_fiat_footprints(self, 
scenario_name: str) -> GeoDataFrame: diff --git a/flood_adapt/object_model/hazard/forcing/discharge.py b/flood_adapt/object_model/hazard/forcing/discharge.py index d72c4ced..206abd58 100644 --- a/flood_adapt/object_model/hazard/forcing/discharge.py +++ b/flood_adapt/object_model/hazard/forcing/discharge.py @@ -58,7 +58,7 @@ def to_dataframe(self, time_frame: TimeModel) -> pd.DataFrame: def save_additional(self, output_dir: Path | str | os.PathLike) -> None: if self.path: - output_dir = Path(output_dir) + output_dir = Path(output_dir).resolve() if self.path == output_dir / self.path.name: return output_dir.mkdir(parents=True, exist_ok=True) diff --git a/flood_adapt/object_model/hazard/forcing/netcdf.py b/flood_adapt/object_model/hazard/forcing/netcdf.py new file mode 100644 index 00000000..418aeaf2 --- /dev/null +++ b/flood_adapt/object_model/hazard/forcing/netcdf.py @@ -0,0 +1,39 @@ +import numpy as np +import pandas as pd +import xarray as xr + + +def validate_netcdf_forcing( + ds: xr.Dataset, required_vars: tuple[str, ...], required_coords: tuple[str, ...]
+) -> xr.Dataset: + """Validate a forcing dataset by checking for required variables and coordinates.""" + # Check variables + _required_vars = set(required_vars) + if not _required_vars.issubset(ds.data_vars): + missing_vars = _required_vars - set(ds.data_vars) + raise ValueError( + f"Missing required variables for netcdf forcing: {missing_vars}" + ) + + # Check coordinates + _required_coords = set(required_coords) + if not _required_coords.issubset(ds.coords): + missing_coords = _required_coords - set(ds.coords) + raise ValueError( + f"Missing required coordinates for netcdf forcing: {missing_coords}" + ) + + # Check time step + ts = pd.to_timedelta(np.diff(ds.time).mean()) + if ts < pd.to_timedelta("1h"): + raise ValueError( + f"SFINCS NetCDF forcing time step cannot be less than 1 hour: {ts}" + ) + + for var in ds.data_vars: + # Check order of dimensions + if ds[var].dims != required_coords: + raise ValueError( + f"Order of dimensions for variable {var} must be {required_coords}" + ) + return ds diff --git a/flood_adapt/object_model/hazard/forcing/rainfall.py b/flood_adapt/object_model/hazard/forcing/rainfall.py index ba912c8a..0c413a28 100644 --- a/flood_adapt/object_model/hazard/forcing/rainfall.py +++ b/flood_adapt/object_model/hazard/forcing/rainfall.py @@ -4,8 +4,10 @@ from typing import Optional import pandas as pd +import xarray as xr from pydantic import Field +from flood_adapt.object_model.hazard.forcing.netcdf import validate_netcdf_forcing from flood_adapt.object_model.hazard.forcing.timeseries import ( CSVTimeseries, SyntheticTimeseries, @@ -60,7 +62,7 @@ class RainfallTrack(IRainfall): def save_additional(self, output_dir: Path | str | os.PathLike) -> None: if self.path: - output_dir = Path(output_dir) + output_dir = Path(output_dir).resolve() if self.path == output_dir / self.path.name: return output_dir.mkdir(parents=True, exist_ok=True) @@ -81,9 +83,33 @@ def to_dataframe(self, time_frame: TimeModel) -> pd.DataFrame: def save_additional(self,
output_dir: Path | str | os.PathLike) -> None: if self.path: - output_dir = Path(output_dir) + output_dir = Path(output_dir).resolve() if self.path == output_dir / self.path.name: return output_dir.mkdir(parents=True, exist_ok=True) shutil.copy2(self.path, output_dir) self.path = output_dir / self.path.name + + +class RainfallNetCDF(IRainfall): + source: ForcingSource = ForcingSource.NETCDF + unit: us.UnitTypesIntensity = us.UnitTypesIntensity.mm_hr + + path: Path + + def read(self) -> xr.Dataset: + required_vars = ("precip",) + required_coords = ("time", "lat", "lon") + with xr.open_dataset(self.path) as ds: + validated_ds = validate_netcdf_forcing(ds, required_vars, required_coords) + return validated_ds + + def save_additional(self, output_dir: Path | str | os.PathLike) -> None: + if self.path: + output_dir = Path(output_dir).resolve() + if self.path == output_dir / self.path.name: + return + output_dir.mkdir(parents=True, exist_ok=True) + shutil.copy2(self.path, output_dir) + self.path = output_dir / self.path.name + diff --git a/flood_adapt/object_model/hazard/forcing/waterlevels.py b/flood_adapt/object_model/hazard/forcing/waterlevels.py index e64daf54..e43fcbe8 100644 --- a/flood_adapt/object_model/hazard/forcing/waterlevels.py +++ b/flood_adapt/object_model/hazard/forcing/waterlevels.py @@ -103,7 +103,7 @@ def to_dataframe(self, time_frame: TimeModel) -> pd.DataFrame: def save_additional(self, output_dir: Path | str | os.PathLike) -> None: if self.path: - output_dir = Path(output_dir) + output_dir = Path(output_dir).resolve() if self.path == output_dir / self.path.name: return output_dir.mkdir(parents=True, exist_ok=True) diff --git a/flood_adapt/object_model/hazard/forcing/wind.py b/flood_adapt/object_model/hazard/forcing/wind.py index bb6c21aa..222d8dd7 100644 --- a/flood_adapt/object_model/hazard/forcing/wind.py +++ b/flood_adapt/object_model/hazard/forcing/wind.py @@ -4,8 +4,10 @@ from typing import Optional import pandas as pd +import xarray as xr from 
pydantic import Field +from flood_adapt.object_model.hazard.forcing.netcdf import validate_netcdf_forcing from flood_adapt.object_model.hazard.forcing.timeseries import SyntheticTimeseries from flood_adapt.object_model.hazard.interface.forcing import ( ForcingSource, @@ -75,7 +77,7 @@ class WindTrack(IWind): def save_additional(self, output_dir: Path | str | os.PathLike) -> None: if self.path: - output_dir = Path(output_dir) + output_dir = Path(output_dir).resolve() if self.path == output_dir / self.path.name: return output_dir.mkdir(parents=True, exist_ok=True) @@ -94,7 +96,7 @@ def to_dataframe(self, time_frame: TimeModel) -> pd.DataFrame: def save_additional(self, output_dir: Path | str | os.PathLike) -> None: if self.path: - output_dir = Path(output_dir) + output_dir = Path(output_dir).resolve() if self.path == output_dir / self.path.name: return output_dir.mkdir(parents=True, exist_ok=True) @@ -104,3 +106,27 @@ def save_additional(self, output_dir: Path | str | os.PathLike) -> None: class WindMeteo(IWind): source: ForcingSource = ForcingSource.METEO + + +class WindNetCDF(IWind): + source: ForcingSource = ForcingSource.NETCDF + unit: us.UnitTypesVelocity = us.UnitTypesVelocity.mps + + path: Path + + def read(self) -> xr.Dataset: + required_vars = ("wind10_v", "wind10_u", "press_msl") + required_coords = ("time", "lat", "lon") + with xr.open_dataset(self.path) as ds: + validated_ds = validate_netcdf_forcing(ds, required_vars, required_coords) + return validated_ds + + def save_additional(self, output_dir: Path | str | os.PathLike) -> None: + if self.path: + output_dir = Path(output_dir).resolve() + if self.path == output_dir / self.path.name: + return + output_dir.mkdir(parents=True, exist_ok=True) + shutil.copy2(self.path, output_dir) + self.path = output_dir / self.path.name + diff --git a/flood_adapt/object_model/hazard/interface/forcing.py b/flood_adapt/object_model/hazard/interface/forcing.py index 011a05ed..ef735476 100644 --- 
a/flood_adapt/object_model/hazard/interface/forcing.py +++ b/flood_adapt/object_model/hazard/interface/forcing.py @@ -41,7 +41,8 @@ class ForcingSource(str, Enum): MODEL = "MODEL" # 'our' hindcast/ sfincs offshore model TRACK = "TRACK" # 'our' hindcast/ sfincs offshore model + (shifted) hurricane - CSV = "CSV" # user imported data + CSV = "CSV" # user provided csv file + NETCDF = "NETCDF" # user provided netcdf file SYNTHETIC = "SYNTHETIC" # synthetic data CONSTANT = "CONSTANT" # synthetic data diff --git a/flood_adapt/object_model/hazard/interface/models.py b/flood_adapt/object_model/hazard/interface/models.py index 66797ca7..e848facc 100644 --- a/flood_adapt/object_model/hazard/interface/models.py +++ b/flood_adapt/object_model/hazard/interface/models.py @@ -12,7 +12,7 @@ class TimeModel(BaseModel): start_time: datetime = REFERENCE_TIME end_time: datetime = REFERENCE_TIME + timedelta(days=1) - time_step: timedelta = timedelta(seconds=10) + time_step: timedelta = timedelta(minutes=10) @field_validator("start_time", "end_time", mode="before") @classmethod diff --git a/tests/test_adapter/test_sfincs_adapter.py b/tests/test_adapter/test_sfincs_adapter.py index c6b1ec65..cd0233ac 100644 --- a/tests/test_adapter/test_sfincs_adapter.py +++ b/tests/test_adapter/test_sfincs_adapter.py @@ -21,6 +21,7 @@ from flood_adapt.object_model.hazard.forcing.rainfall import ( RainfallConstant, RainfallMeteo, + RainfallNetCDF, RainfallSynthetic, ) from flood_adapt.object_model.hazard.forcing.waterlevels import ( @@ -34,6 +35,7 @@ from flood_adapt.object_model.hazard.forcing.wind import ( WindConstant, WindMeteo, + WindNetCDF, WindSynthetic, WindTrack, ) @@ -63,6 +65,9 @@ from flood_adapt.object_model.io import unit_system as us from flood_adapt.object_model.projection import Projection from tests.fixtures import TEST_DATA_DIR +from tests.test_object_model.test_events.test_forcing.test_netcdf import ( + get_test_dataset, +) @pytest.fixture() @@ -73,7 +78,11 @@ def 
default_sfincs_adapter(test_db) -> SfincsAdapter: duration = timedelta(hours=3) adapter.set_timing( - TimeModel(start_time=start_time, end_time=start_time + duration) + TimeModel( + start_time=start_time, + end_time=start_time + duration, + time_step=timedelta(hours=1), + ) ) adapter.logger = mock.Mock() adapter.logger.handlers = [] @@ -86,6 +95,7 @@ def default_sfincs_adapter(test_db) -> SfincsAdapter: @pytest.fixture() def sfincs_adapter_with_dummy_scn(default_sfincs_adapter): + # Mock scenario to get a rainfall multiplier dummy_scn = mock.Mock() dummy_event = mock.Mock() dummy_event.attrs.rainfall_multiplier = 2 @@ -123,6 +133,7 @@ def sfincs_adapter_2_rivers(test_db: IDatabase) -> tuple[IDatabase, SfincsAdapte adapter._logger = mock.Mock() adapter.logger.handlers = [] adapter.logger.warning = mock.Mock() + adapter.ensure_no_existing_forcings() return adapter, test_db @@ -360,6 +371,29 @@ def test_add_forcing_wind_from_meteo( assert default_sfincs_adapter.wind is not None + def test_add_forcing_wind_from_netcdf( + self, test_db: IDatabase, default_sfincs_adapter: SfincsAdapter + ): + # Arrange + path = Path(tempfile.gettempdir()) / "wind_netcdf.nc" + + time = TimeModel(time_step=timedelta(hours=1)) + default_sfincs_adapter.set_timing(time) + + ds = get_test_dataset( + time=time, + lat=int(test_db.site.attrs.lat), + lon=int(test_db.site.attrs.lon), + ) + ds.to_netcdf(path) + forcing = WindNetCDF(path=path) + + # Act + default_sfincs_adapter.add_forcing(forcing) + + # Assert + assert default_sfincs_adapter.wind is not None + def test_add_forcing_wind_from_track( self, test_db, tmp_path, default_sfincs_adapter: SfincsAdapter ): @@ -454,6 +488,30 @@ def test_add_forcing_from_meteo( # Assert assert adapter.rainfall is not None + def test_add_forcing_rainfall_from_netcdf( + self, test_db: IDatabase, sfincs_adapter_with_dummy_scn: SfincsAdapter + ): + # Arrange + adapter = sfincs_adapter_with_dummy_scn + path = Path(tempfile.gettempdir()) / "wind_netcdf.nc" + + time = 
TimeModel(time_step=timedelta(hours=1)) + adapter.set_timing(time) + + ds = get_test_dataset( + time=time, + lat=int(test_db.site.attrs.lat), + lon=int(test_db.site.attrs.lon), + ) + ds.to_netcdf(path) + forcing = RainfallNetCDF(path=path) + + # Act + adapter.add_forcing(forcing) + + # Assert + assert adapter.rainfall is not None + def test_add_forcing_unsupported( self, sfincs_adapter_with_dummy_scn: SfincsAdapter ): diff --git a/tests/test_object_model/test_events/test_forcing/test_netcdf.py b/tests/test_object_model/test_events/test_forcing/test_netcdf.py new file mode 100644 index 00000000..f8ed5ecd --- /dev/null +++ b/tests/test_object_model/test_events/test_forcing/test_netcdf.py @@ -0,0 +1,142 @@ +from copy import copy +from datetime import timedelta + +import numpy as np +import pandas as pd +import pytest +import xarray as xr + +from flood_adapt.object_model.hazard.forcing.netcdf import validate_netcdf_forcing +from flood_adapt.object_model.hazard.interface.models import TimeModel + + +@pytest.fixture +def required_vars(): + return ("wind10_u", "wind10_v", "press_msl", "precip") + + +@pytest.fixture +def required_coords(): + return ("time", "lat", "lon") + + +def get_test_dataset( + lat: int = -80, + lon: int = 32, + time: TimeModel = TimeModel(time_step=timedelta(hours=1)), + coords: tuple[str, ...] = ("time", "lat", "lon"), + data_vars: tuple[str, ...] 
= ("wind10_u", "wind10_v", "press_msl", "precip"), +) -> xr.Dataset: + gen = np.random.default_rng(42) + + _lat = np.arange(lat - 10, lat + 10, 1) + _lon = np.arange(lon - 10, lon + 10, 1) + _time = pd.date_range( + start=time.start_time, + end=time.end_time, + freq=time.time_step, + name="time", + ) + + _coords = { + "time": _time, + "lat": _lat, + "lon": _lon, + } + coords_dict = {name: _coords.get(name, np.arange(10)) for name in coords} + + def _generate_data(dimensions): + shape = tuple(len(coords_dict[dim]) for dim in dimensions if dim in coords_dict) + return gen.random(shape) + + _data_vars = {name: (coords, _generate_data(coords)) for name in data_vars} + + ds = xr.Dataset( + data_vars=_data_vars, + coords=coords_dict, + attrs={ + "crs": 4326, + }, + ) + ds.raster.set_crs(4326) + + return ds + + +def test_all_datavars_all_coords(required_vars, required_coords): + # Arrange + ds = get_test_dataset() + + # Act + result = validate_netcdf_forcing( + ds, required_vars=required_vars, required_coords=required_coords + ) + + # Assert + assert result.equals(ds) + + +def test_missing_datavar_all_coords_raises_validation_error( + required_coords, required_vars +): + # Arrange + vars = tuple(copy(required_vars) + ("missing_var",)) + ds = get_test_dataset(data_vars=required_vars) + + # Act + with pytest.raises(ValueError) as e: + validate_netcdf_forcing(ds, required_vars=vars, required_coords=required_coords) + + # Assert + assert "missing_var" in str(e.value) + assert "Missing required variables for netcdf forcing:" in str(e.value) + + +def test_all_datavar_missing_coords_raises_validation_error( + required_vars, required_coords +): + # Arrange + coords = tuple(copy(required_coords) + ("missing_coord",)) + ds = get_test_dataset(coords=required_coords, data_vars=required_vars) + + # Act + with pytest.raises(ValueError) as e: + validate_netcdf_forcing(ds, required_vars=required_vars, required_coords=coords) + + # Assert + assert "Missing required coordinates for netcdf 
forcing:" in str(e.value) + assert "missing_coord" in str(e.value) + + +def test_netcdf_timestep_less_than_1_hour_raises(required_vars, required_coords): + # Arrange + ds = get_test_dataset( + time=TimeModel(time_step=timedelta(minutes=30)), + ) + + # Act + with pytest.raises(ValueError) as e: + validate_netcdf_forcing( + ds, required_vars=required_vars, required_coords=required_coords + ) + + # Assert + assert "SFINCS NetCDF forcing time step cannot be less than 1 hour" in str(e.value) + + +def test_netcdf_incorrect_coord_order_raises(required_vars, required_coords): + # Arrange + ds = get_test_dataset( + coords=required_coords[::-1], # reverse order + data_vars=required_vars, + ) + + # Act + with pytest.raises(ValueError) as e: + validate_netcdf_forcing( + ds, required_vars=required_vars, required_coords=required_coords + ) + + # Assert + assert "Order of dimensions for variable" in str(e.value) + assert f"must be {tuple(required_coords)}" in str(e.value)