Skip to content

Commit

Permalink
change the SourceSink class to take ForcingModel instead of taking …
Browse files Browse the repository at this point in the history
…individual quantities

and update the `SourceSinkConverter` to use the `TimToForcingConverter`
  • Loading branch information
MAfarrag committed Jan 20, 2025
1 parent 6c182a5 commit 52a5766
Show file tree
Hide file tree
Showing 4 changed files with 122 additions and 35 deletions.
6 changes: 1 addition & 5 deletions hydrolib/core/dflowfm/ext/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -210,13 +210,9 @@ class SourceSink(INIBasedModel):

zsource: Optional[Union[float, List[float]]] = Field(alias="zSource")
zsink: Optional[Union[float, List[float]]] = Field(alias="zSink")
discharge: Union[float, List[float]] = Field(alias="discharge")
area: Optional[float] = Field(alias="Area")

salinitydelta: Optional[Union[List[float], float]] = Field(alias="SalinityDelta")
temperaturedelta: Optional[Union[List[float], float]] = Field(
alias="TemperatureDelta"
)
bc_forcing: ForcingModel = Field(alias="bcForcing")

@classmethod
def _exclude_from_validation(cls, input_data: Optional[dict] = None) -> Set:
Expand Down
26 changes: 16 additions & 10 deletions hydrolib/tools/ext_old_to_new/converters.py
Original file line number Diff line number Diff line change
Expand Up @@ -315,7 +315,7 @@ def merge_mdu_and_ext_file_quantities(

def parse_tim_model(
self, tim_file: Path, ext_file_quantity_list: List[str], **mdu_quantities
) -> Dict[str, List[float]]:
) -> TimModel:
"""Parse the source and sinks related time series from the tim file.
- Parse the TIM file and extract the time series data for each column.
Expand All @@ -337,7 +337,8 @@ def parse_tim_model(
only bool. (i.e. {"temperature": False, "salinity": True})
Returns:
Dict[str, List[float]]: A dictionary containing the time series data form each column in the tim_file.
TimModel: The same `TimModel` after assigning the quantity names; the time series data from each column in
the tim_file. The keys will be the quantity names, and the values will be the time series data.
Raises:
Expand Down Expand Up @@ -431,12 +432,8 @@ def parse_tim_model(
f"Number of columns in the TIM file '{tim_file}: {len(time_series)}' does not match the number of "
f"quantities in the external forcing file: {final_quantities_list}."
)

time_series = {
final_quantities_list[i]: time_series[i + 1]
for i in range(len(final_quantities_list))
}
return time_series
tim_model.quantities_names = final_quantities_list
return tim_model

@property
def root_dir(self) -> Path:
Expand Down Expand Up @@ -502,10 +499,19 @@ def convert(
f"TIM file '{tim_file}' not found for QUANTITY={forcing.quantity}"
)

time_series = self.parse_tim_model(
time_model = self.parse_tim_model(
tim_file, ext_file_quantity_list, **temp_salinity_mdu
)
units = time_model.get_units()
user_defined_names = [
f"user-defines-{i}" for i in range(len(time_model.quantities_names))
]

# get from the mdu file
start_time = "minutes since 2015-01-01 00:00:00"
forcing_model = TimToForcingConverter().convert(
time_model, start_time, units=units, user_defined_names=user_defined_names
)
data = {
"id": "L1",
"name": forcing.quantity,
Expand All @@ -515,8 +521,8 @@ def convert(
"ycoordinates": polyline.y,
"zsource": z_source,
"zsink": z_sink,
"bc_forcing": forcing_model,
}
data = data | time_series
new_block = SourceSink(**data)

return new_block
Expand Down
40 changes: 32 additions & 8 deletions tests/dflowfm/ext/test_ext.py
Original file line number Diff line number Diff line change
Expand Up @@ -216,10 +216,39 @@ def test_initialize_with_time_series_file(self, time_series_file: Path):
assert meteo.forcingfiletype == MeteoForcingFileType.bcascii


forcing_base_list = [
{
"name": "user_defined_name_1",
"function": "timeseries",
"timeinterpolation": "linear",
"quantity": ["time", "discharge"],
"unit": ["minutes since 2015-01-01 00:00:00", "m3/s"],
"datablock": [[1], [1.1234]],
},
{
"name": "user_defined_name_1",
"function": "timeseries",
"timeinterpolation": "linear",
"quantity": ["time", "salinitydelta"],
"unit": ["minutes since 2015-01-01 00:00:00", "ppt"],
"datablock": [[1, 2, 3, 4, 5], [3.0, 5.0, 12.0, 9.0, 23.0]],
},
{
"name": "user_defined_name_2",
"function": "timeseries",
"timeinterpolation": "linear",
"quantity": ["time", "temperature"],
"unit": ["minutes since 2015-01-01 00:00:00", "C"],
"datablock": [[1, 2, 3, 4, 5], [2.0, 2.0, 5.0, 8.0, 10.0]],
},
]


class TestSourceSink:

@pytest.fixture
def source_sink_data(self) -> Dict[str, Any]:

data = {
"id": "L1",
"name": "discharge_salinity_temperature_sorsin",
Expand All @@ -229,9 +258,7 @@ def source_sink_data(self) -> Dict[str, Any]:
"ycoordinates": [12.950216, 6.350155],
"zsource": -3.0,
"zsink": -4.2,
"discharge": 1.1234,
"temperaturedelta": [2.0, 2.0, 5.0, 8.0, 10.0],
"salinitydelta": [3.0, 5.0, 12.0, 9.0, 23.0],
"bc_forcing": ForcingModel(**{"forcing": forcing_base_list}),
"area": 5,
}
return data
Expand Down Expand Up @@ -287,10 +314,7 @@ def test_time_series_discharge_case(self):
"ycoordinates": [12.950216, 6.350155],
"zsource": -3.0,
"zsink": -4.2,
"discharge": [1.0, 2.0, 3.0, 5.0, 8.0],
"temperaturedelta": [2.0, 2.0, 5.0, 8.0, 10.0],
"salinitydelta": [3.0, 5.0, 12.0, 9.0, 23.0],
"bc_forcing": ForcingModel(**{"forcing": forcing_base_list}),
}

source_sink = SourceSink(**data)
print(source_sink)
assert SourceSink(**data)
85 changes: 73 additions & 12 deletions tests/tools/test_converters_source_sink.py
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,8 @@ def test_parse_tim_model(self, tim_file, ext_file_quantity_list, expected_data):
time_series_data = converter.parse_tim_model(
tim_file, ext_file_quantity_list
)
assert time_series_data == expected_data
data = time_series_data.as_dataframe().to_dict(orient="list")
assert data == expected_data

@pytest.mark.parametrize(
"tim_file, ext_file_quantity_list, mdu_quantities, expected_data",
Expand Down Expand Up @@ -151,7 +152,8 @@ def test_parse_tim_model_with_mdu(
time_series_data = converter.parse_tim_model(
tim_file, ext_file_quantity_list, **mdu_quantities
)
assert time_series_data == expected_data
data = time_series_data.as_dataframe().to_dict(orient="list")
assert data == expected_data


class TestSourceSinkConverter:
Expand Down Expand Up @@ -231,13 +233,35 @@ def test_default(self):
converter = SourceSinkConverter()
converter.root_dir = "tests/data/input/source-sink"
new_quantity_block = converter.convert(forcing, ext_file_other_quantities)
assert new_quantity_block.initialtracer_anyname == [4.0, 4.0, 4.0, 4.0, 4.0]
assert new_quantity_block.temperaturedelta == [3.0, 3.0, 3.0, 3.0, 3.0]
assert new_quantity_block.salinitydelta == [2.0, 2.0, 2.0, 2.0, 2.0]
assert new_quantity_block.discharge == [1.0, 1.0, 1.0, 1.0, 1.0]

assert new_quantity_block.zsink == [-4.2]
assert new_quantity_block.zsource == [-3]

# check the converted bc_forcing
bc_forcing = new_quantity_block.bc_forcing
forcing_bases = bc_forcing.forcing
assert [forcing_bases[i].quantityunitpair[1].quantity for i in range(4)] == [
"discharge",
"salinitydelta",
"temperaturedelta",
"initialtracer_anyname",
]
assert [forcing_bases[i].quantityunitpair[1].unit for i in range(4)] == [
"m3/s",
"ppt",
"C",
"Unknown",
]
# check the values of the data block
# initialtracer_anyname
assert forcing_bases[3].datablock[1] == [4.0, 4.0, 4.0, 4.0, 4.0]
# temperature
assert forcing_bases[2].datablock[1] == [3.0, 3.0, 3.0, 3.0, 3.0]
# salinity
assert forcing_bases[1].datablock[1] == [2.0, 2.0, 2.0, 2.0, 2.0]
# discharge
assert forcing_bases[0].datablock[1] == [1.0, 1.0, 1.0, 1.0, 1.0]

def test_4_5_columns_polyline(self):
"""
The test case is based on the assumptions of the default test plus the following changes:
Expand Down Expand Up @@ -290,13 +314,35 @@ def test_4_5_columns_polyline(self):
tim_file = Path("leftsor.tim")
with patch("pathlib.Path.with_suffix", return_value=tim_file):
new_quantity_block = converter.convert(forcing, ext_file_other_quantities)
assert new_quantity_block.initialtracer_anyname == [4.0, 4.0, 4.0, 4.0, 4.0]
assert new_quantity_block.temperaturedelta == [3.0, 3.0, 3.0, 3.0, 3.0]
assert new_quantity_block.salinitydelta == [2.0, 2.0, 2.0, 2.0, 2.0]
assert new_quantity_block.discharge == [1.0, 1.0, 1.0, 1.0, 1.0]

assert new_quantity_block.zsink == [-4.2, -5.35]
assert new_quantity_block.zsource == [-3, -2.90]

# check the converted bc_forcing
bc_forcing = new_quantity_block.bc_forcing
forcing_bases = bc_forcing.forcing
assert [forcing_bases[i].quantityunitpair[1].quantity for i in range(4)] == [
"discharge",
"salinitydelta",
"temperaturedelta",
"initialtracer_anyname",
]
assert [forcing_bases[i].quantityunitpair[1].unit for i in range(4)] == [
"m3/s",
"ppt",
"C",
"Unknown",
]
# check the values of the data block
# initialtracer_anyname
assert forcing_bases[3].datablock[1] == [4.0, 4.0, 4.0, 4.0, 4.0]
# temperature
assert forcing_bases[2].datablock[1] == [3.0, 3.0, 3.0, 3.0, 3.0]
# salinity
assert forcing_bases[1].datablock[1] == [2.0, 2.0, 2.0, 2.0, 2.0]
# discharge
assert forcing_bases[0].datablock[1] == [1.0, 1.0, 1.0, 1.0, 1.0]

def test_no_temperature_no_salinity(self):
"""
The test case is based on the assumptions of the default test plus the following changes:
Expand Down Expand Up @@ -334,7 +380,22 @@ def test_no_temperature_no_salinity(self):
tim_file = Path("no_temperature_no_salinity.tim")
with patch("pathlib.Path.with_suffix", return_value=tim_file):
new_quantity_block = converter.convert(forcing, ext_file_other_quantities)
assert new_quantity_block.initialtracer_anyname == [4.0, 4.0, 4.0, 4.0, 4.0]
assert new_quantity_block.discharge == [1.0, 1.0, 1.0, 1.0, 1.0]

assert new_quantity_block.zsink == [-4.2]
assert new_quantity_block.zsource == [-3]
# check the converted bc_forcing
bc_forcing = new_quantity_block.bc_forcing
forcing_bases = bc_forcing.forcing
assert [forcing_bases[i].quantityunitpair[1].quantity for i in range(2)] == [
"discharge",
"initialtracer_anyname",
]
assert [forcing_bases[i].quantityunitpair[1].unit for i in range(2)] == [
"m3/s",
"Unknown",
]
# check the values of the data block
# initialtracer_anyname
assert forcing_bases[1].datablock[1] == [4.0, 4.0, 4.0, 4.0, 4.0]
# discharge
assert forcing_bases[0].datablock[1] == [1.0, 1.0, 1.0, 1.0, 1.0]

0 comments on commit 52a5766

Please sign in to comment.