diff --git a/hydrolib/core/dflowfm/ext/models.py b/hydrolib/core/dflowfm/ext/models.py
index adb440f9d..ebc03d7b5 100644
--- a/hydrolib/core/dflowfm/ext/models.py
+++ b/hydrolib/core/dflowfm/ext/models.py
@@ -210,13 +210,9 @@ class SourceSink(INIBasedModel):
     zsource: Optional[Union[float, List[float]]] = Field(alias="zSource")
     zsink: Optional[Union[float, List[float]]] = Field(alias="zSink")
-    discharge: Union[float, List[float]] = Field(alias="discharge")
     area: Optional[float] = Field(alias="Area")
-    salinitydelta: Optional[Union[List[float], float]] = Field(alias="SalinityDelta")
-    temperaturedelta: Optional[Union[List[float], float]] = Field(
-        alias="TemperatureDelta"
-    )
+    bc_forcing: ForcingModel = Field(alias="bcForcing")
 
     @classmethod
     def _exclude_from_validation(cls, input_data: Optional[dict] = None) -> Set:
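For reviewers, a minimal sketch of how the reworked block is populated now that the per-quantity keywords are gone: the time series move into a single `ForcingModel` behind `bc_forcing`. It mirrors the `source_sink_data` fixture added to the tests further down in this diff; the import locations are assumptions based on the file paths shown here.

```python
# Sketch only: mirrors the source_sink_data fixture in tests/dflowfm/ext/test_ext.py below.
# Import locations are assumptions based on the file paths in this diff.
from hydrolib.core.dflowfm.bc.models import ForcingModel
from hydrolib.core.dflowfm.ext.models import SourceSink

bc_forcing = ForcingModel(
    forcing=[
        {
            "name": "user_defined_name_1",
            "function": "timeseries",
            "timeinterpolation": "linear",
            "quantity": ["time", "discharge"],
            "unit": ["minutes since 2015-01-01 00:00:00", "m3/s"],
            "datablock": [[1], [1.1234]],
        }
    ]
)

source_sink = SourceSink(
    id="L1",
    name="discharge_salinity_temperature_sorsin",
    numcoordinates=2,
    xcoordinates=[63.350456, 45.200344],
    ycoordinates=[12.950216, 6.350155],
    zsource=-3.0,
    zsink=-4.2,
    area=5,
    bc_forcing=bc_forcing,  # replaces discharge / salinitydelta / temperaturedelta
)
```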
diff --git a/hydrolib/tools/ext_old_to_new/converters.py b/hydrolib/tools/ext_old_to_new/converters.py
index 6af749398..88264c5f8 100644
--- a/hydrolib/tools/ext_old_to_new/converters.py
+++ b/hydrolib/tools/ext_old_to_new/converters.py
@@ -315,7 +315,7 @@ def merge_mdu_and_ext_file_quantities(
     def parse_tim_model(
         self, tim_file: Path, ext_file_quantity_list: List[str], **mdu_quantities
-    ) -> Dict[str, List[float]]:
+    ) -> TimModel:
         """Parse the source and sinks related time series from the tim file.
 
         - Parse the TIM file and extract the time series data for each column.
@@ -337,7 +337,8 @@ def parse_tim_model(
                 only bool. (i.e. {"temperature", False, "salinity": True})
 
         Returns:
-            Dict[str, List[float]]: A dictionary containing the time series data form each column in the tim_file.
+            TimModel: The same `TimModel`, with the quantity names assigned and the time series data from each column in
+                the tim_file.
                 the keys of the dictionary will be the quantity names, and the values will be the time series data.
 
         Raises:
@@ -431,12 +432,8 @@ def parse_tim_model(
                 f"Number of columns in the TIM file '{tim_file}: {len(time_series)}' does not match the number of "
                 f"quantities in the external forcing file: {final_quantities_list}."
             )
-
-        time_series = {
-            final_quantities_list[i]: time_series[i + 1]
-            for i in range(len(final_quantities_list))
-        }
-        return time_series
+        tim_model.quantities_names = final_quantities_list
+        return tim_model
 
     @property
     def root_dir(self) -> Path:
@@ -502,10 +499,19 @@ def convert(
                 f"TIM file '{tim_file}' not found for QUANTITY={forcing.quantity}"
            )
 
-        time_series = self.parse_tim_model(
+        time_model = self.parse_tim_model(
             tim_file, ext_file_quantity_list, **temp_salinity_mdu
         )
+        units = time_model.get_units()
+        user_defined_names = [
+            f"user-defines-{i}" for i in range(len(time_model.quantities_names))
+        ]
+        # TODO: get the start time from the mdu file
+        start_time = "minutes since 2015-01-01 00:00:00"
+        forcing_model = TimToForcingConverter().convert(
+            time_model, start_time, units=units, user_defined_names=user_defined_names
+        )
 
         data = {
             "id": "L1",
             "name": forcing.quantity,
@@ -515,8 +521,8 @@ def convert(
             "ycoordinates": polyline.y,
             "zsource": z_source,
             "zsink": z_sink,
+            "bc_forcing": forcing_model,
         }
-        data = data | time_series
 
         new_block = SourceSink(**data)
         return new_block
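Pieced together from the converter changes above, a sketch of the new flow inside `convert`: the `.tim` columns become a `TimModel`, `TimToForcingConverter` turns that into a `ForcingModel`, and the result lands in `SourceSink.bc_forcing`. The tim file path and quantity list are illustrative only, and the import location of the two converters is an assumption.

```python
# Sketch of the pipeline used by SourceSinkConverter.convert() above.
# The .tim path and the quantity list are illustrative inputs; the import path is an assumption.
from pathlib import Path

from hydrolib.tools.ext_old_to_new.converters import (
    SourceSinkConverter,
    TimToForcingConverter,
)

converter = SourceSinkConverter()
converter.root_dir = "tests/data/input/source-sink"

# 1. Parse the .tim file: the result is now a TimModel whose quantities_names
#    carry the column names instead of a Dict[str, List[float]].
tim_model = converter.parse_tim_model(
    Path("tests/data/input/source-sink/leftsor.tim"),
    ["discharge_salinity_temperature_sorsin", "initialtracer_anyname"],
)

# 2. Convert the TimModel into .bc-style forcing blocks. The reference time is
#    still hard-coded in the converter and should eventually come from the MDU file.
forcing_model = TimToForcingConverter().convert(
    tim_model,
    "minutes since 2015-01-01 00:00:00",
    units=tim_model.get_units(),
    user_defined_names=[
        f"user-defines-{i}" for i in range(len(tim_model.quantities_names))
    ],
)

# 3. This ForcingModel is what ends up in SourceSink.bc_forcing.
print([f.quantityunitpair[1].quantity for f in forcing_model.forcing])
```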
diff --git a/tests/dflowfm/ext/test_ext.py b/tests/dflowfm/ext/test_ext.py
index 2d660c014..f6b580d9e 100644
--- a/tests/dflowfm/ext/test_ext.py
+++ b/tests/dflowfm/ext/test_ext.py
@@ -216,10 +216,39 @@ def test_initialize_with_time_series_file(self, time_series_file: Path):
         assert meteo.forcingfiletype == MeteoForcingFileType.bcascii
 
 
+forcing_base_list = [
+    {
+        "name": "user_defined_name_1",
+        "function": "timeseries",
+        "timeinterpolation": "linear",
+        "quantity": ["time", "discharge"],
+        "unit": ["minutes since 2015-01-01 00:00:00", "m3/s"],
+        "datablock": [[1], [1.1234]],
+    },
+    {
+        "name": "user_defined_name_1",
+        "function": "timeseries",
+        "timeinterpolation": "linear",
+        "quantity": ["time", "salinitydelta"],
+        "unit": ["minutes since 2015-01-01 00:00:00", "ppt"],
+        "datablock": [[1, 2, 3, 4, 5], [3.0, 5.0, 12.0, 9.0, 23.0]],
+    },
+    {
+        "name": "user_defined_name_2",
+        "function": "timeseries",
+        "timeinterpolation": "linear",
+        "quantity": ["time", "temperature"],
+        "unit": ["minutes since 2015-01-01 00:00:00", "C"],
+        "datablock": [[1, 2, 3, 4, 5], [2.0, 2.0, 5.0, 8.0, 10.0]],
+    },
+]
+
+
 class TestSourceSink:
     @pytest.fixture
     def source_sink_data(self) -> Dict[str, Any]:
+
         data = {
             "id": "L1",
             "name": "discharge_salinity_temperature_sorsin",
@@ -229,9 +258,7 @@ def source_sink_data(self) -> Dict[str, Any]:
             "ycoordinates": [12.950216, 6.350155],
             "zsource": -3.0,
             "zsink": -4.2,
-            "discharge": 1.1234,
-            "temperaturedelta": [2.0, 2.0, 5.0, 8.0, 10.0],
-            "salinitydelta": [3.0, 5.0, 12.0, 9.0, 23.0],
+            "bc_forcing": ForcingModel(**{"forcing": forcing_base_list}),
             "area": 5,
         }
         return data
@@ -287,10 +314,7 @@ def test_time_series_discharge_case(self):
             "ycoordinates": [12.950216, 6.350155],
             "zsource": -3.0,
             "zsink": -4.2,
-            "discharge": [1.0, 2.0, 3.0, 5.0, 8.0],
-            "temperaturedelta": [2.0, 2.0, 5.0, 8.0, 10.0],
-            "salinitydelta": [3.0, 5.0, 12.0, 9.0, 23.0],
+            "bc_forcing": ForcingModel(**{"forcing": forcing_base_list}),
         }
 
-        source_sink = SourceSink(**data)
-        print(source_sink)
+        assert SourceSink(**data)
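The converter tests that follow no longer compare `parse_tim_model` output against a plain dict; they flatten the returned `TimModel` through `as_dataframe().to_dict(orient="list")`. A short sketch of that reshaping, assuming the usual `TimModel`/`TimRecord` constructors and the `quantities_names` attribute this PR assigns; the data values are illustrative.

```python
# Sketch of the reshaping the updated tests below rely on: a TimModel with
# assigned quantity names flattened back to the old Dict[str, List[float]] form.
# The TimModel/TimRecord construction here is illustrative; the converter builds
# the model from a .tim file instead.
from hydrolib.core.dflowfm.tim.models import TimModel, TimRecord

tim_model = TimModel(
    timeseries=[
        TimRecord(time=0.0, data=[1.0, 2.0]),
        TimRecord(time=60.0, data=[1.0, 2.0]),
    ]
)
tim_model.quantities_names = ["discharge", "salinitydelta"]

data = tim_model.as_dataframe().to_dict(orient="list")
# expected shape: {"discharge": [1.0, 1.0], "salinitydelta": [2.0, 2.0]}
```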
diff --git a/tests/tools/test_converters_source_sink.py b/tests/tools/test_converters_source_sink.py
index df313333c..fdb945317 100644
--- a/tests/tools/test_converters_source_sink.py
+++ b/tests/tools/test_converters_source_sink.py
@@ -76,7 +76,8 @@ def test_parse_tim_model(self, tim_file, ext_file_quantity_list, expected_data):
         time_series_data = converter.parse_tim_model(
             tim_file, ext_file_quantity_list
         )
-        assert time_series_data == expected_data
+        data = time_series_data.as_dataframe().to_dict(orient="list")
+        assert data == expected_data
 
     @pytest.mark.parametrize(
         "tim_file, ext_file_quantity_list, mdu_quantities, expected_data",
@@ -151,7 +152,8 @@ def test_parse_tim_model_with_mdu(
         time_series_data = converter.parse_tim_model(
             tim_file, ext_file_quantity_list, **mdu_quantities
         )
-        assert time_series_data == expected_data
+        data = time_series_data.as_dataframe().to_dict(orient="list")
+        assert data == expected_data
 
 
 class TestSourceSinkConverter:
@@ -231,13 +233,35 @@ def test_default(self):
         converter = SourceSinkConverter()
         converter.root_dir = "tests/data/input/source-sink"
         new_quantity_block = converter.convert(forcing, ext_file_other_quantities)
-        assert new_quantity_block.initialtracer_anyname == [4.0, 4.0, 4.0, 4.0, 4.0]
-        assert new_quantity_block.temperaturedelta == [3.0, 3.0, 3.0, 3.0, 3.0]
-        assert new_quantity_block.salinitydelta == [2.0, 2.0, 2.0, 2.0, 2.0]
-        assert new_quantity_block.discharge == [1.0, 1.0, 1.0, 1.0, 1.0]
+
         assert new_quantity_block.zsink == [-4.2]
         assert new_quantity_block.zsource == [-3]
 
+        # check the converted bc_forcing
+        bc_forcing = new_quantity_block.bc_forcing
+        forcing_bases = bc_forcing.forcing
+        assert [forcing_bases[i].quantityunitpair[1].quantity for i in range(4)] == [
+            "discharge",
+            "salinitydelta",
+            "temperaturedelta",
+            "initialtracer_anyname",
+        ]
+        assert [forcing_bases[i].quantityunitpair[1].unit for i in range(4)] == [
+            "m3/s",
+            "ppt",
+            "C",
+            "Unknown",
+        ]
+        # check the values of the data block
+        # initialtracer_anyname
+        assert forcing_bases[3].datablock[1] == [4.0, 4.0, 4.0, 4.0, 4.0]
+        # temperature
+        assert forcing_bases[2].datablock[1] == [3.0, 3.0, 3.0, 3.0, 3.0]
+        # salinity
+        assert forcing_bases[1].datablock[1] == [2.0, 2.0, 2.0, 2.0, 2.0]
+        # discharge
+        assert forcing_bases[0].datablock[1] == [1.0, 1.0, 1.0, 1.0, 1.0]
+
     def test_4_5_columns_polyline(self):
         """
         The test case is based on the assumptions of the default test plus the following changes:
@@ -290,13 +314,35 @@
         tim_file = Path("leftsor.tim")
         with patch("pathlib.Path.with_suffix", return_value=tim_file):
             new_quantity_block = converter.convert(forcing, ext_file_other_quantities)
-        assert new_quantity_block.initialtracer_anyname == [4.0, 4.0, 4.0, 4.0, 4.0]
-        assert new_quantity_block.temperaturedelta == [3.0, 3.0, 3.0, 3.0, 3.0]
-        assert new_quantity_block.salinitydelta == [2.0, 2.0, 2.0, 2.0, 2.0]
-        assert new_quantity_block.discharge == [1.0, 1.0, 1.0, 1.0, 1.0]
+
         assert new_quantity_block.zsink == [-4.2, -5.35]
         assert new_quantity_block.zsource == [-3, -2.90]
 
+        # check the converted bc_forcing
+        bc_forcing = new_quantity_block.bc_forcing
+        forcing_bases = bc_forcing.forcing
+        assert [forcing_bases[i].quantityunitpair[1].quantity for i in range(4)] == [
+            "discharge",
+            "salinitydelta",
+            "temperaturedelta",
+            "initialtracer_anyname",
+        ]
+        assert [forcing_bases[i].quantityunitpair[1].unit for i in range(4)] == [
+            "m3/s",
+            "ppt",
+            "C",
+            "Unknown",
+        ]
+        # check the values of the data block
+        # initialtracer_anyname
+        assert forcing_bases[3].datablock[1] == [4.0, 4.0, 4.0, 4.0, 4.0]
+        # temperature
+        assert forcing_bases[2].datablock[1] == [3.0, 3.0, 3.0, 3.0, 3.0]
+        # salinity
+        assert forcing_bases[1].datablock[1] == [2.0, 2.0, 2.0, 2.0, 2.0]
+        # discharge
+        assert forcing_bases[0].datablock[1] == [1.0, 1.0, 1.0, 1.0, 1.0]
+
     def test_no_temperature_no_salinity(self):
         """
         The test case is based on the assumptions of the default test plus the following changes:
@@ -334,7 +380,22 @@
         tim_file = Path("no_temperature_no_salinity.tim")
         with patch("pathlib.Path.with_suffix", return_value=tim_file):
             new_quantity_block = converter.convert(forcing, ext_file_other_quantities)
-        assert new_quantity_block.initialtracer_anyname == [4.0, 4.0, 4.0, 4.0, 4.0]
-        assert new_quantity_block.discharge == [1.0, 1.0, 1.0, 1.0, 1.0]
+
         assert new_quantity_block.zsink == [-4.2]
         assert new_quantity_block.zsource == [-3]
+
+        # check the converted bc_forcing
+        bc_forcing = new_quantity_block.bc_forcing
+        forcing_bases = bc_forcing.forcing
+        assert [forcing_bases[i].quantityunitpair[1].quantity for i in range(2)] == [
+            "discharge",
+            "initialtracer_anyname",
+        ]
+        assert [forcing_bases[i].quantityunitpair[1].unit for i in range(2)] == [
+            "m3/s",
+            "Unknown",
+        ]
+        # check the values of the data block
+        # initialtracer_anyname
+        assert forcing_bases[1].datablock[1] == [4.0, 4.0, 4.0, 4.0, 4.0]
+        # discharge
+        assert forcing_bases[0].datablock[1] == [1.0, 1.0, 1.0, 1.0, 1.0]
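Closing the loop on what `test_default` exercises: after conversion, the generated time series live on `bc_forcing` and can be written out as a `.bc` file. In the sketch below, `forcing` and `ext_file_other_quantities` are placeholders for the objects the test constructs, and the `save()` call assumes `ForcingModel` inherits the usual `FileModel` save behaviour.

```python
# End-to-end sketch of the converter usage shown in test_default above.
# `forcing` and `ext_file_other_quantities` are placeholders for the objects the
# test constructs; saving assumes ForcingModel behaves like any FileModel.
from pathlib import Path

from hydrolib.tools.ext_old_to_new.converters import SourceSinkConverter

converter = SourceSinkConverter()
converter.root_dir = "tests/data/input/source-sink"
new_block = converter.convert(forcing, ext_file_other_quantities)

bc_model = new_block.bc_forcing
bc_model.filepath = Path("sourcesink.bc")
bc_model.save()  # writes the time series that previously sat directly on SourceSink
```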