From a80833c851fe107f4952f5057912e07f9ef72cdc Mon Sep 17 00:00:00 2001 From: Huite Bootsma Date: Fri, 1 Sep 2023 16:58:00 +0200 Subject: [PATCH 01/41] Fix flake config --- setup.cfg | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/setup.cfg b/setup.cfg index 4d92d41..374e480 100644 --- a/setup.cfg +++ b/setup.cfg @@ -43,10 +43,14 @@ install_requires = [flake8] ignore = - E203 # whitespace before ':' - doesn't work well with black - E402 # module level import not at top of file - E501 # line too long - let black worry about that - W503 # line break before binary operator + # whitespace before ':' - doesn't work well with black + E203, + # module level import not at top of file + E402, + # line too long - let black worry about that + E501, + # line break before binary operator + W503, per-file-ignores = __init__.py:F401 ./docs/conf.py:F401 From 0fd523d93b02310b04309d94b2fa97314ddea171 Mon Sep 17 00:00:00 2001 From: Huite Bootsma Date: Fri, 1 Sep 2023 16:58:32 +0200 Subject: [PATCH 02/41] Add LeakyBuildingPit --- gistim/common.py | 4 + gistim/timml_elements.py | 7 +- gistim/ttim_elements.py | 4 +- plugin/qgistim/core/processing.py | 1 - plugin/qgistim/core/tim_elements.py | 140 ++++++++++++++++------------ 5 files changed, 90 insertions(+), 66 deletions(-) diff --git a/gistim/common.py b/gistim/common.py index 4f918ee..dadd633 100644 --- a/gistim/common.py +++ b/gistim/common.py @@ -163,6 +163,10 @@ def aquifer_data( if "rate" in dataframe: d["N"] = dataframe.loc[0, "rate"] + # For leaky building pit + if "resistance" in dataframe: + d["res"] = dataframe.loc[0, "resistance"] + filtered = {} for k, value in d.items(): if isinstance(value, np.ndarray): diff --git a/gistim/timml_elements.py b/gistim/timml_elements.py index 09c963a..9b910ae 100644 --- a/gistim/timml_elements.py +++ b/gistim/timml_elements.py @@ -59,7 +59,7 @@ def observation(spec: ElementSpecification) -> List[Dict[str, Any]]: X, Y = point_coordinates(dataframe) kwargslist = 
[] kwargslist = [] - for (row, x, y) in zip(dataframe.to_dict("records"), X, Y): + for row, x, y in zip(dataframe.to_dict("records"), X, Y): row = filter_nan(row) kwargslist.append( { @@ -75,7 +75,7 @@ def well(spec: ElementSpecification) -> List[Dict[str, Any]]: dataframe = spec.dataframe X, Y = point_coordinates(dataframe) kwargslist = [] - for (row, x, y) in zip(dataframe.to_dict("records"), X, Y): + for row, x, y in zip(dataframe.to_dict("records"), X, Y): row = filter_nan(row) kwargslist.append( { @@ -95,7 +95,7 @@ def headwell(spec: ElementSpecification) -> List[Dict[str, Any]]: dataframe = spec.dataframe X, Y = point_coordinates(dataframe) kwargslist = [] - for (row, x, y) in zip(dataframe.to_dict("records"), X, Y): + for row, x, y in zip(dataframe.to_dict("records"), X, Y): row = filter_nan(row) kwargslist.append( { @@ -369,6 +369,7 @@ def headgrid(model: timml.Model, extent: Tuple[float], cellsize: float) -> xr.Da "Leaky Line Doublet": (leakylinedoublet, timml.LeakyLineDoubletString), "Impermeable Line Doublet": (implinedoublet, timml.ImpLineDoubletString), "Building Pit": (buildingpit, timml.BuildingPit), + "Leaky Building Pit": (buildingpit, timml.LeakyBuildingPit), "Observation": (observation, None), } diff --git a/gistim/ttim_elements.py b/gistim/ttim_elements.py index 1885752..f6a4f19 100644 --- a/gistim/ttim_elements.py +++ b/gistim/ttim_elements.py @@ -121,7 +121,7 @@ def well( dataframe = spec.steady_spec.dataframe X, Y = point_coordinates(dataframe) kwargslist = [] - for (row, x, y) in zip(dataframe.to_dict("records"), X, Y): + for row, x, y in zip(dataframe.to_dict("records"), X, Y): row = filter_nan(row) kwargslist.append( { @@ -146,7 +146,7 @@ def headwell( dataframe = spec.steady_spec.dataframe X, Y = point_coordinates(dataframe) kwargslist = [] - for (row, x, y) in zip(dataframe.to_dict("records"), X, Y): + for row, x, y in zip(dataframe.to_dict("records"), X, Y): row = filter_nan(row) kwargslist.append( { diff --git 
a/plugin/qgistim/core/processing.py b/plugin/qgistim/core/processing.py index b885973..6086168 100644 --- a/plugin/qgistim/core/processing.py +++ b/plugin/qgistim/core/processing.py @@ -78,7 +78,6 @@ def steady_contours( stop: float, step: float, ) -> QgsVectorLayer: - contourer = QgsMeshContours(layer) # Collect contours from mesh layer diff --git a/plugin/qgistim/core/tim_elements.py b/plugin/qgistim/core/tim_elements.py index 1020c1a..9362441 100644 --- a/plugin/qgistim/core/tim_elements.py +++ b/plugin/qgistim/core/tim_elements.py @@ -44,8 +44,14 @@ * Observations: triangle * Line Doublets: red * Polygons: Line and Fill same color, Fill color 15% opacity. + +Nota bene: the order of the columns is important for hiding and showing the +transient columns. QGIS has a bug which causes it to show the wrong column if +hidden columns appear before shown ones. This only affects the attribute table +when it has no features. """ +import abc import re from collections import defaultdict from functools import partial @@ -89,55 +95,6 @@ TRANSPARENT_GREY = f"135,135,135,{OPACITY}" -# These columns are reused by Aquifer and Polygon Inhom, Building pit Aquitards -# are on top of the aquifer, so it comes first Nota bene: the order of these is -# important for hiding and showing the transient columns. QGIS has a bug which -# causes it to show the wrong column if hidden columns appear before shown -# ones. This only affects the attribute table when it has no features. 
-AQUIFER_ATTRIBUTES = [ - QgsField("layer", QVariant.Int), - QgsField("aquifer_top", QVariant.Double), - QgsField("aquifer_bottom", QVariant.Double), - QgsField("aquitard_c", QVariant.Double), - QgsField("aquifer_k", QVariant.Double), - QgsField("semiconf_top", QVariant.Double), - QgsField("semiconf_head", QVariant.Double), - QgsField("aquitard_s", QVariant.Double), - QgsField("aquifer_s", QVariant.Double), - QgsField("aquitard_npor", QVariant.Double), - QgsField("aquifer_npor", QVariant.Double), -] -INHOM_ATTRIBUTES = [ - QgsField("inhomogeneity_id", QVariant.Int), - QgsField("layer", QVariant.Int), - QgsField("aquifer_top", QVariant.Double), - QgsField("aquifer_bottom", QVariant.Double), - QgsField("aquitard_c", QVariant.Double), - QgsField("aquifer_k", QVariant.Double), - QgsField("semiconf_top", QVariant.Double), - QgsField("semiconf_head", QVariant.Double), - QgsField("rate", QVariant.Double), - QgsField("aquitard_s", QVariant.Double), - QgsField("aquifer_s", QVariant.Double), - QgsField("aquitard_npor", QVariant.Double), - QgsField("aquifer_npor", QVariant.Double), -] -BUILDING_PIT_ATTRIBUTES = [ - QgsField("inhomogeneity_id", QVariant.Int), - QgsField("layer", QVariant.Int), - QgsField("aquifer_top", QVariant.Double), - QgsField("aquifer_bottom", QVariant.Double), - QgsField("aquitard_c", QVariant.Double), - QgsField("aquifer_k", QVariant.Double), - QgsField("semiconf_top", QVariant.Double), - QgsField("semiconf_head", QVariant.Double), - QgsField("aquitard_s", QVariant.Double), - QgsField("aquifer_s", QVariant.Double), - QgsField("aquitard_npor", QVariant.Double), - QgsField("aquifer_npor", QVariant.Double), -] - - class NameDialog(QDialog): def __init__(self, parent=None): super().__init__(parent) @@ -432,7 +389,19 @@ def update_extent(self, iface: Any) -> Tuple[float, float]: class Aquifer(TransientElement): element_type = "Aquifer" geometry_type = "No Geometry" - timml_attributes = AQUIFER_ATTRIBUTES.copy() + timml_attributes = [ + QgsField("layer", 
QVariant.Int), + QgsField("aquifer_top", QVariant.Double), + QgsField("aquifer_bottom", QVariant.Double), + QgsField("aquitard_c", QVariant.Double), + QgsField("aquifer_k", QVariant.Double), + QgsField("semiconf_top", QVariant.Double), + QgsField("semiconf_head", QVariant.Double), + QgsField("aquitard_s", QVariant.Double), + QgsField("aquifer_s", QVariant.Double), + QgsField("aquitard_npor", QVariant.Double), + QgsField("aquifer_npor", QVariant.Double), + ] ttim_attributes = ( QgsField("time_min", QVariant.Double), QgsField("time_max", QVariant.Double), @@ -772,7 +741,21 @@ class PolygonInhomogeneity(AssociatedElement): QgsField("order", QVariant.Int), QgsField("ndegrees", QVariant.Int), ) - assoc_attributes = INHOM_ATTRIBUTES.copy() + assoc_attributes = [ + QgsField("inhomogeneity_id", QVariant.Int), + QgsField("layer", QVariant.Int), + QgsField("aquifer_top", QVariant.Double), + QgsField("aquifer_bottom", QVariant.Double), + QgsField("aquitard_c", QVariant.Double), + QgsField("aquifer_k", QVariant.Double), + QgsField("semiconf_top", QVariant.Double), + QgsField("semiconf_head", QVariant.Double), + QgsField("rate", QVariant.Double), + QgsField("aquitard_s", QVariant.Double), + QgsField("aquifer_s", QVariant.Double), + QgsField("aquitard_npor", QVariant.Double), + QgsField("aquifer_npor", QVariant.Double), + ] timml_defaults = { "order": QgsDefaultValue("4"), "ndegrees": QgsDefaultValue("6"), @@ -806,8 +789,7 @@ def on_transient_changed(self, transient: bool): return -class BuildingPit(AssociatedElement): - element_type = "Building Pit" +class AbstractBuildingPit(AssociatedElement, abc.ABC): geometry_type = "Polygon" timml_attributes = ( QgsField("inhomogeneity_id", QVariant.Int), @@ -815,7 +797,6 @@ class BuildingPit(AssociatedElement): QgsField("ndegrees", QVariant.Int), QgsField("layer", QVariant.Int), ) - assoc_attributes = BUILDING_PIT_ATTRIBUTES.copy() timml_defaults = { "inhomogeneity_id": QgsDefaultValue("1"), "order": QgsDefaultValue("4"), @@ -831,12 
+812,6 @@ class BuildingPit(AssociatedElement): "aquifer_npor", ) - @property - def renderer(self) -> QgsSingleSymbolRenderer: - return self.polygon_renderer( - color=TRANSPARENT_RED, color_border=RED, width_border="0.75" - ) - def on_transient_changed(self, transient: bool): config = self.assoc_layer.attributeTableConfig() columns = config.columns() @@ -849,6 +824,50 @@ def on_transient_changed(self, transient: bool): return +class BuildingPit(AbstractBuildingPit): + element_type = "Building Pit" + assoc_attributes = [ + QgsField("inhomogeneity_id", QVariant.Int), + QgsField("layer", QVariant.Int), + QgsField("aquifer_top", QVariant.Double), + QgsField("aquifer_bottom", QVariant.Double), + QgsField("aquitard_c", QVariant.Double), + QgsField("aquifer_k", QVariant.Double), + QgsField("semiconf_top", QVariant.Double), + QgsField("semiconf_head", QVariant.Double), + ] + + @property + def renderer(self) -> QgsSingleSymbolRenderer: + return self.polygon_renderer( + color=TRANSPARENT_RED, color_border=RED, width_border="0.75" + ) + + +class LeakyBuildingPit(AbstractBuildingPit): + element_type = "Leaky Building Pit" + assoc_attributes = [ + QgsField("inhomogeneity_id", QVariant.Int), + QgsField("layer", QVariant.Int), + QgsField("aquifer_top", QVariant.Double), + QgsField("aquifer_bottom", QVariant.Double), + QgsField("aquitard_c", QVariant.Double), + QgsField("aquifer_k", QVariant.Double), + QgsField("resistance", QVariant.Double), + QgsField("semiconf_top", QVariant.Double), + QgsField("semiconf_head", QVariant.Double), + ] + + @property + def renderer(self) -> QgsSingleSymbolRenderer: + return self.polygon_renderer( + color=TRANSPARENT_RED, + color_border=RED, + width_border="0.75", + outline_style="dash", + ) + + ELEMENTS = { element.element_type: element for element in ( @@ -867,6 +886,7 @@ def on_transient_changed(self, transient: bool): PolygonSemiConfinedTop, PolygonInhomogeneity, BuildingPit, + LeakyBuildingPit, Observation, ) } From 
89f69e0a53145328db1a1bda769c61803d9ee03a Mon Sep 17 00:00:00 2001 From: Huite Bootsma Date: Wed, 6 Sep 2023 21:06:54 +0200 Subject: [PATCH 03/41] Start refactoring to allow validation --- plugin/qgistim/core/elements/__init__.py | 77 +++ plugin/qgistim/core/elements/aquifer.py | 101 ++++ plugin/qgistim/core/elements/building_pit.py | 0 .../core/elements/circular_area_sink.py | 17 + plugin/qgistim/core/elements/colors.py | 13 + plugin/qgistim/core/elements/constant.py | 39 ++ plugin/qgistim/core/elements/domain.py | 73 +++ plugin/qgistim/core/elements/element.py | 448 ++++++++++++++++++ .../qgistim/core/elements/head_line_sink.py | 106 +++++ plugin/qgistim/core/elements/headwell.py | 17 + .../core/elements/impermeable_line_doublet.py | 0 .../core/elements/leaky_building_pit.py | 0 .../core/elements/leaky_line_doublet.py | 0 .../qgistim/core/elements/line_sink_ditch.py | 0 plugin/qgistim/core/elements/observation.py | 42 ++ .../core/elements/polygon_area_sink.py | 0 .../core/elements/polygon_inhomogeneity.py | 85 ++++ plugin/qgistim/core/elements/uniform_flow.py | 21 + plugin/qgistim/core/elements/well.py | 120 +++++ plugin/qgistim/core/extractor.py | 66 +++ plugin/qgistim/core/schemata.py | 321 +++++++++++++ plugin/qgistim/core/tim_elements.py | 13 +- plugin/qgistim/widgets/dataset_widget.py | 25 +- plugin/qgistim/widgets/elements_widget.py | 2 +- plugin/qgistim/widgets/error_window.py | 18 + 25 files changed, 1600 insertions(+), 4 deletions(-) create mode 100644 plugin/qgistim/core/elements/__init__.py create mode 100644 plugin/qgistim/core/elements/aquifer.py create mode 100644 plugin/qgistim/core/elements/building_pit.py create mode 100644 plugin/qgistim/core/elements/circular_area_sink.py create mode 100644 plugin/qgistim/core/elements/colors.py create mode 100644 plugin/qgistim/core/elements/constant.py create mode 100644 plugin/qgistim/core/elements/domain.py create mode 100644 plugin/qgistim/core/elements/element.py create mode 100644 
plugin/qgistim/core/elements/head_line_sink.py create mode 100644 plugin/qgistim/core/elements/headwell.py create mode 100644 plugin/qgistim/core/elements/impermeable_line_doublet.py create mode 100644 plugin/qgistim/core/elements/leaky_building_pit.py create mode 100644 plugin/qgistim/core/elements/leaky_line_doublet.py create mode 100644 plugin/qgistim/core/elements/line_sink_ditch.py create mode 100644 plugin/qgistim/core/elements/observation.py create mode 100644 plugin/qgistim/core/elements/polygon_area_sink.py create mode 100644 plugin/qgistim/core/elements/polygon_inhomogeneity.py create mode 100644 plugin/qgistim/core/elements/uniform_flow.py create mode 100644 plugin/qgistim/core/elements/well.py create mode 100644 plugin/qgistim/core/extractor.py create mode 100644 plugin/qgistim/core/schemata.py create mode 100644 plugin/qgistim/widgets/error_window.py diff --git a/plugin/qgistim/core/elements/__init__.py b/plugin/qgistim/core/elements/__init__.py new file mode 100644 index 0000000..26b054f --- /dev/null +++ b/plugin/qgistim/core/elements/__init__.py @@ -0,0 +1,77 @@ +import re +from collections import defaultdict +from functools import partial +from typing import List, Tuple + +from qgistim.core import geopackage +from qgistim.core.elements.aquifer import Aquifer +from qgistim.core.elements.constant import Constant +from qgistim.core.elements.domain import Domain +from qgistim.core.elements.element import Element +from qgistim.core.elements.uniform_flow import UniformFlow +from qgistim.core.elements.well import Well + +ELEMENTS = { + element.element_type: element + for element in ( + Aquifer, + Domain, + Constant, + UniformFlow, + Well, + ) +} + + +def parse_name(layername: str) -> Tuple[str, str, str]: + """ + Based on the layer name find out: + + * whether it's a timml or ttim element; + * which element type it is; + * what the user provided name is. 
+ + For example: + parse_name("timml Headwell: drainage") -> ("timml", "Head Well", "drainage") + + This function can also be found in gistim.common + """ + prefix, name = layername.split(":") + element_type = re.split("timml |ttim ", prefix)[1] + mapping = { + "Computation Times": "Domain", + "Temporal Settings": "Aquifer", + "Polygon Inhomogeneity Properties": "Polygon Inhomogeneity", + "Building Pit Properties": "Building Pit", + "Leaky Building Pit Properties": "Building Pit", + } + element_type = mapping.get(element_type, element_type) + if "timml" in prefix: + if "Properties" in prefix: + tim_type = "timml_assoc" + else: + tim_type = "timml" + elif "ttim" in prefix: + tim_type = "ttim" + else: + raise ValueError("Neither timml nor ttim in layername") + return tim_type, element_type, name + + +def load_elements_from_geopackage(path: str) -> List[Element]: + # List the names in the geopackage + gpkg_names = geopackage.layers(path) + + # Group them on the basis of name + dd = defaultdict + grouped_names = dd(partial(dd, partial(dd, list))) + for layername in gpkg_names: + tim_type, element_type, name = parse_name(layername) + grouped_names[element_type][name][tim_type] = layername + + elements = [] + for element_type, group in grouped_names.items(): + for name in group: + elements.append(ELEMENTS[element_type](path, name)) + + return elements diff --git a/plugin/qgistim/core/elements/aquifer.py b/plugin/qgistim/core/elements/aquifer.py new file mode 100644 index 0000000..3ffae7b --- /dev/null +++ b/plugin/qgistim/core/elements/aquifer.py @@ -0,0 +1,101 @@ +from collections import defaultdict +from dataclasses import dataclass +from typing import Any, Dict, List + +from PyQt5.QtCore import QVariant +from qgis.core import QgsDefaultValue, QgsField +from qgistim.core import geopackage +from qgistim.core.elements.element import TransientElement +from qgistim.core.schemata import ( + AllGreaterEqual, + AllOptional, + AllRequired, + FirstOnly, + OffsetAllRequired, + 
Positive, + Range, + SemiConfined, + StrictlyDecreasing, +) + + +class Aquifer(TransientElement): + element_type = "Aquifer" + geometry_type = "No Geometry" + timml_attributes = [ + QgsField("layer", QVariant.Int), + QgsField("aquifer_top", QVariant.Double), + QgsField("aquifer_bottom", QVariant.Double), + QgsField("aquitard_c", QVariant.Double), + QgsField("aquifer_k", QVariant.Double), + QgsField("semiconf_top", QVariant.Double), + QgsField("semiconf_head", QVariant.Double), + QgsField("aquitard_s", QVariant.Double), + QgsField("aquifer_s", QVariant.Double), + QgsField("aquitard_npor", QVariant.Double), + QgsField("aquifer_npor", QVariant.Double), + ] + ttim_attributes = ( + QgsField("time_min", QVariant.Double), + QgsField("time_max", QVariant.Double), + QgsField("time_start", QVariant.Double), + QgsField("stehfest_M", QVariant.Int), + QgsField("reference_date", QVariant.DateTime), + ) + ttim_defaults = { + "time_min": QgsDefaultValue("0.01"), + "time_max": QgsDefaultValue("10.0"), + "time_start": QgsDefaultValue("0.0"), + "stehfest_M": QgsDefaultValue("10"), + } + transient_columns = ( + "aquitard_s", + "aquifer_s", + "aquitard_npor", + "aquifer_npor", + ) + + timml_schemata = { + "layer": AllRequired(Range()), + "aquifer_top": AllRequired(StrictlyDecreasing()), + "aquifer_bottom": AllRequired(StrictlyDecreasing()), + "aquifer_k": AllRequired(Positive()), + "aquitard_c": OffsetAllRequired(Positive()), + "semiconf_top": AllOptional(FirstOnly()), + "semiconf_head": AllOptional(FirstOnly()), + } + timml_global_schemata = ( + SemiConfined(), + AllGreaterEqual("aquifer_top", "aquifer_bottom"), + ) + ttim_schemata = { + "aquitard_s": OffsetAllRequired(Positive()), + "aquifer_s": AllRequired(Positive()), + } + + def __init__(self, path: str, name: str): + self._initialize_default(path, name) + self.timml_name = f"timml {self.element_type}:Aquifer" + self.ttim_name = "ttim Temporal Settings:Aquifer" + + def write(self): + self.timml_layer = geopackage.write_layer( + 
self.path, self.timml_layer, self.timml_name, newfile=True + ) + self.ttim_layer = geopackage.write_layer( + self.path, self.ttim_layer, self.ttim_name + ) + self.set_defaults() + + def remove_from_geopackage(self): + """This element may not be removed.""" + return + + def to_timml(self): + data = self.table_to_dict(self.timml_layer) + self.validate_timml(data) + return self.aquifer_data(data, transient=False) + + def to_ttim(self): + data = self.table_to_dict(self.timml_layer) + return self.aquifer_data(data, transient=True) diff --git a/plugin/qgistim/core/elements/building_pit.py b/plugin/qgistim/core/elements/building_pit.py new file mode 100644 index 0000000..e69de29 diff --git a/plugin/qgistim/core/elements/circular_area_sink.py b/plugin/qgistim/core/elements/circular_area_sink.py new file mode 100644 index 0000000..cebeb36 --- /dev/null +++ b/plugin/qgistim/core/elements/circular_area_sink.py @@ -0,0 +1,17 @@ +class CircularAreaSinkSchema: + schemata = { + "rate": Required(), + "layer": Required(Membership("layers")), + } + + +class TransientCircularAreaSinkSchema: + global_schemata = ( + AllOrNone("time_start", "time_end", "rate_transient"), + NotBoth("time_start", "timeseries_id"), + ) + schemata = { + "time_start": Optional(Time()), + "time_end": Optional(Time()), + "timeseries_id": Optional(Membership("timeseries_ids")), + } diff --git a/plugin/qgistim/core/elements/colors.py b/plugin/qgistim/core/elements/colors.py new file mode 100644 index 0000000..302e338 --- /dev/null +++ b/plugin/qgistim/core/elements/colors.py @@ -0,0 +1,13 @@ +RED = "215,48,39,255" +GREEN = "51,160,44,255" +BLUE = "31,120,180,255" +GREY = "135,135,135,255" +BLACK = "0,0,0,255" +LIGHT_BLUE = "166,206,227,255" + +OPACITY = 51 # 20% +TRANSPARENT = "255,255,255,0" +TRANSPARENT_RED = f"215,48,39,{OPACITY}" +TRANSPARENT_GREEN = f"51,160,44,{OPACITY}" +TRANSPARENT_BLUE = f"31,120,180,{OPACITY}" +TRANSPARENT_GREY = f"135,135,135,{OPACITY}" diff --git 
a/plugin/qgistim/core/elements/constant.py b/plugin/qgistim/core/elements/constant.py new file mode 100644 index 0000000..635e3e5 --- /dev/null +++ b/plugin/qgistim/core/elements/constant.py @@ -0,0 +1,39 @@ +from PyQt5.QtCore import QVariant +from qgis.core import QgsField, QgsSingleSymbolRenderer +from qgistim.core.elements.colors import RED +from qgistim.core.elements.element import Element, ElementSchema +from qgistim.core.schemata import Membership, Required, SingleRow + + +class ConstantSchema(ElementSchema): + global_schemata = (SingleRow(),) + schemata = { + "head": Required(), + "layer": Membership("layers"), + } + + +class Constant(Element): + element_type = "Constant" + geometry_type = "Point" + timml_attributes = ( + QgsField("head", QVariant.Double), + QgsField("layer", QVariant.Int), + QgsField("label", QVariant.String), + ) + + @property + def renderer(self) -> QgsSingleSymbolRenderer: + return self.marker_renderer(color=RED, name="star", size="5") + + def to_timml(self): + data = self.to_dict() + # TODO: validate + data = data[0] + return { + "xr": data["geometry"][0][0], + "yr": data["geometry"][0][1], + "hr": data["head"], + "layer": data["layer"], + "label": data["label"], + } diff --git a/plugin/qgistim/core/elements/domain.py b/plugin/qgistim/core/elements/domain.py new file mode 100644 index 0000000..3dc8e2b --- /dev/null +++ b/plugin/qgistim/core/elements/domain.py @@ -0,0 +1,73 @@ +from typing import Any, Tuple + +from PyQt5.QtCore import QVariant +from qgis.core import ( + QgsFeature, + QgsField, + QgsGeometry, + QgsPointXY, + QgsSingleSymbolRenderer, +) +from qgistim.core.elements.colors import BLACK +from qgistim.core.elements.element import TransientElement + + +class Domain(TransientElement): + element_type = "Domain" + geometry_type = "Polygon" + ttim_attributes = (QgsField("time", QVariant.Double),) + + def __init__(self, path: str, name: str): + self._initialize_default(path, name) + self.timml_name = f"timml 
{self.element_type}:Domain" + self.ttim_name = "ttim Computation Times:Domain" + + @property + def renderer(self) -> QgsSingleSymbolRenderer: + """ + Results in transparent fill, with a medium thick black border line. + """ + return self.polygon_renderer( + color="255,0,0,0", color_border=BLACK, width_border="0.75" + ) + + def remove_from_geopackage(self): + pass + + def update_extent(self, iface: Any) -> Tuple[float, float]: + provider = self.timml_layer.dataProvider() + provider.truncate() # removes all features + canvas = iface.mapCanvas() + extent = canvas.extent() + xmin = extent.xMinimum() + ymin = extent.yMinimum() + xmax = extent.xMaximum() + ymax = extent.yMaximum() + points = [ + QgsPointXY(xmin, ymax), + QgsPointXY(xmax, ymax), + QgsPointXY(xmax, ymin), + QgsPointXY(xmin, ymin), + ] + feature = QgsFeature() + feature.setGeometry(QgsGeometry.fromPolygonXY([points])) + provider.addFeatures([feature]) + canvas.refresh() + return ymax, ymin + + def to_timml(self): + data = self.to_dict(self.timml_layer) + x = [point[0] for point in data[0]["geometry"]] + y = [point[1] for point in data[0]["geometry"]] + return { + "xmin": min(x), + "xmax": max(x), + "ymin": min(y), + "ymax": max(y), + } + + def to_ttim(self): + data = self.to_timml() + ttim_data = self.table_to_dict(self.ttim_layer) + data["time"] = ttim_data["time"] + return data diff --git a/plugin/qgistim/core/elements/element.py b/plugin/qgistim/core/elements/element.py new file mode 100644 index 0000000..81ad441 --- /dev/null +++ b/plugin/qgistim/core/elements/element.py @@ -0,0 +1,448 @@ +""" +This module contains the classes to represent the TimML and TTim input layers. + +The classes specify: + +* The (unabbreviated) name +* The type of geometry (No geometry, point, linestring, polygon) +* The required attributes of the attribute table + +They contain logic for setting up: + +* Simple input, requiring a single table, e.g. 
Uniform Flow or Constant +* Transient input, requiring two tables, one with geometry and steady-state + properties, and one containing optional time series input e.g. Well, Head Line + Sink. +* Associated input, requiring two tables, one with geometry and one with input + for layers, e.g. Polygon Inhomogeneity or Building Pit. + +Each element is (optionally) represented in multiple places: + +* It always lives in a GeoPackage. +* While a geopackage is active within plugin, it is always represented in a + Dataset Tree: the Dataset Tree provides a direct look at the state of the + GeoPackage. In this tree, steady and transient input are on the same row. + Associated input is, to potentially enable transient associated data later + on (like a building pit with changing head top boundary). +* It can be added to the Layers Panel in QGIS. This enables a user to visualize + and edit its data. + +Some elements require specific rendering in QGIS (e.g. no fill polygons), which +are supplied by the `.renderer` property. + +The coupling of separate tables (geometry table and time series table) is only +explicit in the Dataset Tree. The only way of knowing that tables are +associated with each other is by comparing names. Names must therefore be +unique within a group of the same type of elements. + +Rendering: + +* Fixed discharge (area sink, well, ditch): green +* Fixed head (head well, semi-confined top, head line): blue +* Inhomogeneity: grey +* Constant: star +* Observations: triangle +* Line Doublets: red +* Polygons: Line and Fill same color, Fill color 15% opacity. + +Nota bene: the order of the columns is important for hiding and showing the +transient columns. QGIS has a bug which causes it to show the wrong column if +hidden columns appear before shown ones. This only affects the attribute table +when it has no features. 
+""" + +import abc +from collections import defaultdict +from typing import Any, List + +from PyQt5.QtWidgets import ( + QDialog, + QHBoxLayout, + QLabel, + QLineEdit, + QPushButton, + QVBoxLayout, +) +from qgis.core import ( + QgsFillSymbol, + QgsLineSymbol, + QgsMarkerSymbol, + QgsSingleSymbolRenderer, + QgsVectorLayer, +) +from qgistim.core import geopackage +from qgistim.core.extractor import ExtractorMixin +from qgistim.core.schemata import ValidationError + + +class ElementSchema(abc.ABC): + def validate(self, data, **kwargs): + for key, value in self.schemata.items(): + value.validate(data[key], **kwargs) + return + + +class NameDialog(QDialog): + def __init__(self, parent=None): + super().__init__(parent) + self.name_line_edit = QLineEdit() + self.ok_button = QPushButton("OK") + self.cancel_button = QPushButton("Cancel") + self.ok_button.clicked.connect(self.accept) + self.cancel_button.clicked.connect(self.reject) + first_row = QHBoxLayout() + first_row.addWidget(QLabel("Layer name")) + first_row.addWidget(self.name_line_edit) + second_row = QHBoxLayout() + second_row.addStretch() + second_row.addWidget(self.ok_button) + second_row.addWidget(self.cancel_button) + layout = QVBoxLayout() + layout.addLayout(first_row) + layout.addLayout(second_row) + self.setLayout(layout) + + +class Element(ExtractorMixin): + """ + Abstract base class for "ordinary" timml elements. 
+ """ + + element_type = None + geometry_type = None + timml_attributes = () + ttim_attributes = () + assoc_attributes = () + transient_columns = () + timml_defaults = {} + ttim_defaults = {} + assoc_defaults = {} + + def _initialize_default(self, path, name): + self.name = name + self.path = path + self.timml_name = None + self.ttim_name = None + self.assoc_name = None + self.timml_layer = None + self.ttim_layer = None + self.assoc_layer = None + self.item = None + + def __init__(self, path: str, name: str): + self._initialize_default(path, name) + self.timml_name = f"timml {self.element_type}:{name}" + + @classmethod + def dialog(cls, path: str, crs: Any, iface: Any, names: List[str]): + dialog = NameDialog() + dialog.show() + ok = dialog.exec_() + if not ok: + return + + name = dialog.name_line_edit.text() + if name in names: + raise ValueError(f"Name already exists in geopackage: {name}") + + instance = cls(path, name) + instance.create_layers(crs) + return instance + + def create_layer( + self, crs: Any, geometry_type: str, name: str, attributes: List + ) -> QgsVectorLayer: + layer = QgsVectorLayer(geometry_type, name, "memory") + provider = layer.dataProvider() + provider.addAttributes(attributes) + layer.updateFields() + layer.setCrs(crs) + return layer + + def create_timml_layer(self, crs: Any): + self.timml_layer = self.create_layer( + crs=crs, + geometry_type=self.geometry_type, + name=self.timml_name, + attributes=self.timml_attributes, + ) + + def create_ttim_layer(self, crs: Any): + pass + + def create_assoc_layer(self, crs: Any): + pass + + def create_layers(self, crs: Any): + self.create_timml_layer(crs) + self.create_ttim_layer(crs) + self.create_assoc_layer(crs) + + def set_defaults(self): + for layer, defaults in zip( + (self.timml_layer, self.ttim_layer, self.assoc_layer), + (self.timml_defaults, self.ttim_defaults, self.assoc_defaults), + ): + if layer is None: + continue + fields = layer.fields() + for name, definition in defaults.items(): + 
index = fields.indexFromName(name) + layer.setDefaultValueDefinition(index, definition) + return + + @staticmethod + def marker_renderer(**kwargs): + symbol = QgsMarkerSymbol.createSimple(kwargs) + return QgsSingleSymbolRenderer(symbol) + + @staticmethod + def line_renderer(**kwargs): + symbol = QgsLineSymbol.createSimple(kwargs) + return QgsSingleSymbolRenderer(symbol) + + @staticmethod + def polygon_renderer(**kwargs): + symbol = QgsFillSymbol.createSimple(kwargs) + return QgsSingleSymbolRenderer(symbol) + + @property + def renderer(self): + return None + + def timml_layer_from_geopackage(self) -> QgsVectorLayer: + self.timml_layer = QgsVectorLayer( + f"{self.path}|layername={self.timml_name}", self.timml_name + ) + + def ttim_layer_from_geopackage(self): + return + + def assoc_layer_from_geopackage(self): + return + + def load_layers_from_geopackage(self) -> None: + self.timml_layer_from_geopackage() + self.ttim_layer_from_geopackage() + self.assoc_layer_from_geopackage() + self.set_defaults() + return + + def write(self): + self.timml_layer = geopackage.write_layer( + self.path, self.timml_layer, self.timml_name + ) + self.set_defaults() + + def remove_from_geopackage(self): + geopackage.remove_layer(self.path, self.timml_name) + + def on_transient_changed(self, transient: bool): + if len(self.transient_columns) == 0: + return + + config = self.timml_layer.attributeTableConfig() + columns = config.columns() + + for i, column in enumerate(columns): + if column.name in self.transient_columns: + config.setColumnHidden(i, not transient) + + self.timml_layer.setAttributeTableConfig(config) + return + + @abc.abstractmethod + def to_timml(self, data, **kwargs): + """ """ + + def validate(self, data, **kwargs): + errors = [] + for schema in self.global_schemata: + try: + schema.validate(data, **kwargs) + except ValidationError as e: + errors.append(str(e)) + + for variable, schema in self.schemata: + variable_errors = [] + try: + schema.validate(data, **kwargs) + 
except ValidationError as e: + variable_errors.append(str(e)) + + if variable_errors: + errors.append(f"{variable:}\n") + errors.append(variable_errors) + + return errors + + @staticmethod + def transient_input(row, all_timeseries, variable: str, time_start): + timeseries_id = row["timeseries_id"] + row_start = row["time_start"] + row_end = row["time_end"] + steady_value = row[variable] + transient_value = row[f"{variable}_transient"] + + start_and_stop = ( + row_start is not None + and row_end is not None + and transient_value is not None + ) + + if start_and_stop: + return [(row_start, transient_value - steady_value), (row_end, 0.0)] + elif timeseries_id is not None: + timeseries = all_timeseries[timeseries_id] + return [ + (time, value - steady_value) + for time, value in zip(timeseries["time_start"], timeseries[variable]) + ] + else: + return [(time_start), 0.0] + + @staticmethod + def aquifer_data(data, transient: bool): + def interleave(a, b): + return [value for pair in zip(a, b) for value in pair] + + hstar = data["semiconf_head"][0] + semiconfined = ( + data["aquitard_c"][0] is not None + and data["semiconf_top"][0] is not None + and hstar is not None + ) + + kaq = data["aquifer_k"] + c = data["aquitard_c"] + z = [data["semiconf_top"][0]] + interleave( + data["aquifer_top"], data["aquifer_bottom"] + ) + porosity = interleave(data["aquitard_npor"], data["aquifer_npor"]) + s_aquifer = data["aquifer_s"] + s_aquitard = data["aquitard_s"] + topboundary = "semi" + + if semiconfined: + topboundary = "conf" + else: + topboundary = "semi" + z.pop(0) + c.pop(0) + porosity.pop(0) + s_aquitard.pop(0) + + aquifer = { + "kaq": kaq, + "z": z, + "c": c, + "npor": porosity, + } + + if transient: + aquifer["Saq"] = s_aquifer + aquifer["Sll"] = s_aquitard + aquifer["phreatictop"] = True + else: + aquifer["topboundary"] = topboundary + aquifer["hstar"] = hstar + + if "resistance" in data: + aquifer["res"] = data["resistance"][0] + if "rate" in data: + aquifer["N"] = 
data["rate"][0] + + return aquifer + + def validate_timml(self, data): + errors = defaultdict(list) + for variable, schema in self.timml_schemata.items(): + try: + schema.validate(data) + except ValidationError as e: + errors[variable].append(e) + + for schema in self.timml_global_schemata: + try: + schema.validate(data) + except ValidationError as e: + errors[""].append(e) + + return errors + + +class TransientElement(Element): + """ + Abstract base class for transient (ttim) elements. + """ + + def __init__(self, path: str, name: str): + self._initialize_default(path, name) + self.timml_name = f"timml {self.element_type}:{name}" + self.ttim_name = f"ttim {self.element_type}:{name}" + + def create_ttim_layer(self, crs: Any): + self.ttim_layer = self.create_layer( + crs=crs, + geometry_type="No Geometry", + name=self.ttim_name, + attributes=self.ttim_attributes, + ) + + def ttim_layer_from_geopackage(self): + self.ttim_layer = QgsVectorLayer( + f"{self.path}|layername={self.ttim_name}", + self.ttim_name, + ) + + def write(self): + self.timml_layer = geopackage.write_layer( + self.path, self.timml_layer, self.timml_name + ) + self.ttim_layer = geopackage.write_layer( + self.path, self.ttim_layer, self.ttim_name + ) + self.set_defaults() + + def remove_from_geopackage(self): + geopackage.remove_layer(self.path, self.timml_name) + geopackage.remove_layer(self.path, self.ttim_name) + + +class AssociatedElement(Element): + """ + Abstract class for elements that require associated tables such as + Inhomogenities. 
+ """ + + def __init__(self, path: str, name: str): + self._initialize_default(path, name) + self.timml_name = f"timml {self.element_type}:{name}" + self.assoc_name = f"timml {self.element_type} Properties:{name}" + + def create_assoc_layer(self, crs: Any): + self.assoc_layer = self.create_layer( + crs=crs, + geometry_type="No Geometry", + name=self.assoc_name, + attributes=self.assoc_attributes, + ) + + def assoc_layer_from_geopackage(self): + self.assoc_layer = QgsVectorLayer( + f"{self.path}|layername={self.assoc_name}", + self.assoc_name, + ) + + def write(self): + self.timml_layer = geopackage.write_layer( + self.path, self.timml_layer, self.timml_name + ) + self.assoc_layer = geopackage.write_layer( + self.path, self.assoc_layer, self.assoc_name + ) + self.set_defaults() + + def remove_from_geopackage(self): + geopackage.remove_layer(self.path, self.timml_name) + geopackage.remove_layer(self.path, self.assoc_name) diff --git a/plugin/qgistim/core/elements/head_line_sink.py b/plugin/qgistim/core/elements/head_line_sink.py new file mode 100644 index 0000000..c5aa1dd --- /dev/null +++ b/plugin/qgistim/core/elements/head_line_sink.py @@ -0,0 +1,106 @@ +from PyQt5.QtCore import QVariant +from qgis.core import QgsDefaultValue, QgsField, QgsSingleSymbolRenderer +from qgistim.core.elements.colors import BLUE +from qgistim.core.elements.element import TransientElement +from qgistim.core.schemata import ( + AllOrNone, + Membership, + NotBoth, + Optional, + Positive, + Required, + Time, +) + + +class HeadLineSinkSchema: + schemata = { + "head": Required(), + "resistance": Required(Positive()), + "width": Required(Positive()), + "order": Required(Positive()), + "layer": Required(Membership("layers")), + } + + +class TransientHeadLineSinkSchema: + global_schemata = ( + AllOrNone("time_start", "time_end", "head_transient"), + NotBoth("time_start", "timeseries_id"), + Time(), + ) + schemata = { + "time_start": Optional(Time()), + "time_end": Optional(Time()), + } + + +class 
HeadLineSink(TransientElement): + element_type = "Head Line Sink" + geometry_type = "Linestring" + timml_attributes = ( + QgsField("head", QVariant.Double), + QgsField("resistance", QVariant.Double), + QgsField("width", QVariant.Double), + QgsField("order", QVariant.Int), + QgsField("layer", QVariant.Int), + QgsField("label", QVariant.String), + QgsField("time_start", QVariant.Double), + QgsField("time_end", QVariant.Double), + QgsField("head_transient", QVariant.Double), + QgsField("timeseries_id", QVariant.Int), + ) + ttim_attributes = ( + QgsField("timeseries_id", QVariant.Int), + QgsField("time_start", QVariant.Double), + QgsField("head", QVariant.Double), + ) + timml_defaults = { + "order": QgsDefaultValue("4"), + } + transient_columns = ( + "time_start", + "time_end", + "head_transient", + "timeseries_id", + ) + + @property + def renderer(self) -> QgsSingleSymbolRenderer: + return self.line_renderer(color=BLUE, width="0.75") + + def to_timml(self): + data = self.to_dict(self.timml_layer) + sinks = [] + for row in data: + sinks.append( + { + "xy": self.linestring_xy(row), + "hls": row["head"], + "res": row["resistance"], + "wh": row["width"], + "order": row["order"], + "layers": row["layer"], + "label": row["label"], + } + ) + return sinks + + def to_ttim(self, time_start): + data = self.to_dict(self.timml_layer) + ttim_data = self.table_to_dict(self.ttim_layer) + grouped = self.groupby(ttim_data, "timeseries_id") + sinks = [] + for row in data: + sinks.append( + { + "xy": self.linestring_xy(row), + "tsandh": self._transient_input(row, grouped, "head", time_start), + "res": row["resistance"], + "wh": row["width"], + "order": row["order"], + "layers": row["layer"], + "label": row["label"], + } + ) + return sinks diff --git a/plugin/qgistim/core/elements/headwell.py b/plugin/qgistim/core/elements/headwell.py new file mode 100644 index 0000000..ca77a0b --- /dev/null +++ b/plugin/qgistim/core/elements/headwell.py @@ -0,0 +1,17 @@ +class HeadWellSchema: + schemata 
= { + "head": [AllValueSchema()], + "radius": [AllValueSchema(), PositiveSchema()], + "resistance": [AllValueSchema(), PositiveSchema()], + "layer": [AllValueSchema(), MemberschipSchema("layers")], + } + + +class TransientHeadWellSchema: + schemata = { + "timeseries_id": [MembershipSchema("timeseries_ids")], + } + global_schemata = ( + AllOrNoneSchema(("time_start", "time_end", "head_transient")), + XorSchema("time_start", "timeseries_id"), + ) diff --git a/plugin/qgistim/core/elements/impermeable_line_doublet.py b/plugin/qgistim/core/elements/impermeable_line_doublet.py new file mode 100644 index 0000000..e69de29 diff --git a/plugin/qgistim/core/elements/leaky_building_pit.py b/plugin/qgistim/core/elements/leaky_building_pit.py new file mode 100644 index 0000000..e69de29 diff --git a/plugin/qgistim/core/elements/leaky_line_doublet.py b/plugin/qgistim/core/elements/leaky_line_doublet.py new file mode 100644 index 0000000..e69de29 diff --git a/plugin/qgistim/core/elements/line_sink_ditch.py b/plugin/qgistim/core/elements/line_sink_ditch.py new file mode 100644 index 0000000..e69de29 diff --git a/plugin/qgistim/core/elements/observation.py b/plugin/qgistim/core/elements/observation.py new file mode 100644 index 0000000..b3bbb95 --- /dev/null +++ b/plugin/qgistim/core/elements/observation.py @@ -0,0 +1,42 @@ +from PyQt5.QtCore import QVariant +from qgis.core import QgsDefaultValue, QgsField, QgsSingleSymbolRenderer +from qgistim.core.elements.colors import LIGHT_BLUE +from qgistim.core.elements.element import TransientElement + + +class Observation(TransientElement): + element_type = "Observation" + geometry_type = "Point" + timml_attributes = ( + QgsField("label", QVariant.String), + QgsField("timeseries_id", QVariant.Int), + ) + ttim_attributes = ( + QgsField("timeseries_id", QVariant.Int), + QgsField("time", QVariant.Double), + ) + timml_defaults = { + "timeseries_id": QgsDefaultValue("1"), + } + ttim_defaults = { + "timeseries_id": QgsDefaultValue("1"), + } + 
transient_columns = ("timeseries_id",) + + @property + def renderer(self) -> QgsSingleSymbolRenderer: + return self.marker_renderer(color=LIGHT_BLUE, name="triangle", size="3") + + def to_timml(self): + data = self.to_dict(self.timml_layer) + + observations = [] + for row in data: + observations.append( + { + "x": row["geometry"][0][0], + "y": row["geometry"][0][1], + "label": row["label"], + } + ) + return observations diff --git a/plugin/qgistim/core/elements/polygon_area_sink.py b/plugin/qgistim/core/elements/polygon_area_sink.py new file mode 100644 index 0000000..e69de29 diff --git a/plugin/qgistim/core/elements/polygon_inhomogeneity.py b/plugin/qgistim/core/elements/polygon_inhomogeneity.py new file mode 100644 index 0000000..4c87a36 --- /dev/null +++ b/plugin/qgistim/core/elements/polygon_inhomogeneity.py @@ -0,0 +1,85 @@ +from PyQt5.QtCore import QVariant +from qgis.core import QgsDefaultValue, QgsField, QgsSingleSymbolRenderer +from qgistim.core.elements.colors import GREY, TRANSPARENT_GREY +from qgistim.core.elements.element import AssociatedElement +from qgistim.core.schemata import ( + AllOrNone, + Membership, + NotBoth, + Optional, + Positive, + Required, + SemiConfined, + Time, +) + + +class PolygonInhomogeneity(AssociatedElement): + element_type = "Polygon Inhomogeneity" + geometry_type = "Polygon" + timml_attributes = ( + QgsField("inhomogeneity_id", QVariant.Int), + QgsField("order", QVariant.Int), + QgsField("ndegrees", QVariant.Int), + ) + assoc_attributes = [ + QgsField("inhomogeneity_id", QVariant.Int), + QgsField("layer", QVariant.Int), + QgsField("aquifer_top", QVariant.Double), + QgsField("aquifer_bottom", QVariant.Double), + QgsField("aquitard_c", QVariant.Double), + QgsField("aquifer_k", QVariant.Double), + QgsField("semiconf_top", QVariant.Double), + QgsField("semiconf_head", QVariant.Double), + QgsField("rate", QVariant.Double), + QgsField("aquitard_s", QVariant.Double), + QgsField("aquifer_s", QVariant.Double), + 
QgsField("aquitard_npor", QVariant.Double), + QgsField("aquifer_npor", QVariant.Double), + ] + timml_defaults = { + "order": QgsDefaultValue("4"), + "ndegrees": QgsDefaultValue("6"), + "inhomogeneity_id": QgsDefaultValue("1"), + } + assoc_defaults = { + "inhomogeneity_id": QgsDefaultValue("1"), + } + transient_columns = ( + "aquitard_s", + "aquifer_s", + "aquitard_npor", + "aquifer_npor", + ) + + @property + def renderer(self) -> QgsSingleSymbolRenderer: + return self.polygon_renderer( + color=TRANSPARENT_GREY, color_border=GREY, width_border="0.75" + ) + + def on_transient_changed(self, transient: bool): + config = self.assoc_layer.attributeTableConfig() + columns = config.columns() + + for i, column in enumerate(columns): + if column.name in self.transient_columns: + config.setColumnHidden(i, not transient) + + self.assoc_layer.setAttributeTableConfig(config) + return + + def to_timml(self): + data = self.to_dict(self.timml_layer) + assoc_data = self.table_to_dict(self.assoc_layer) + grouped = self.groupby(assoc_data, "inhomogeneity_id") + + inhoms = [] + for row in data: + inhom_id = row["inhomogeneity_id"] + kwargs = self._aquifer_data(grouped[inhom_id]) + kwargs["xy"] = self._polygon_xy(row) + kwargs["order"] = row["order"] + kwargs["ndeg"] = row["ndegrees"] + inhoms.append(kwargs) + return inhoms diff --git a/plugin/qgistim/core/elements/uniform_flow.py b/plugin/qgistim/core/elements/uniform_flow.py new file mode 100644 index 0000000..d93364f --- /dev/null +++ b/plugin/qgistim/core/elements/uniform_flow.py @@ -0,0 +1,21 @@ +from PyQt5.QtCore import QVariant +from qgis.core import QgsField +from qgistim.core.elements.element import Element + + +class UniformFlow(Element): + element_type = "Uniform Flow" + geometry_type = "No geometry" + timml_attributes = ( + QgsField("slope", QVariant.Double), + QgsField("angle", QVariant.Double), + QgsField("label", QVariant.String), + ) + + def to_timml(self): + data = self.to_dict(self.timml_layer) + return { + "slope": 
data["slope"], + "angle": data["angle"], + "label": data["label"], + } diff --git a/plugin/qgistim/core/elements/well.py b/plugin/qgistim/core/elements/well.py new file mode 100644 index 0000000..5663d48 --- /dev/null +++ b/plugin/qgistim/core/elements/well.py @@ -0,0 +1,120 @@ +from PyQt5.QtCore import QVariant +from qgis.core import QgsDefaultValue, QgsField, QgsSingleSymbolRenderer +from qgistim.core.elements.colors import GREEN +from qgistim.core.elements.element import TransientElement +from qgistim.core.schemata import ( + AllOrNone, + Membership, + NotBoth, + Optional, + Positive, + Required, + Time, +) + + +class WellSchema: + schemata = { + "discharge": Required(), + "radius": Required(), + "resistance": Required(), + "layer": Required(Membership("layers")), + } + + +class TransientWellSchema: + schemata = { + "caisson_radius": Required(Positive), + "slug": Required(), + "time_start": Optional(Time()), + "time_end": Optional(Time()), + "timeseries_id": Optional(Membership("timeseries_ids")), + } + global_schemata = ( + AllOrNone(("time_start", "time_end", "discharge_transient")), + NotBoth("time_start", "timeseries_id"), + ) + + +class Well(TransientElement): + element_type = "Well" + geometry_type = "Point" + timml_attributes = ( + QgsField("discharge", QVariant.Double), + QgsField("radius", QVariant.Double), + QgsField("resistance", QVariant.Double), + QgsField("layer", QVariant.Int), + QgsField("label", QVariant.String), + QgsField("time_start", QVariant.Double), + QgsField("time_end", QVariant.Double), + QgsField("discharge_transient", QVariant.Double), + QgsField("caisson_radius", QVariant.Double), + QgsField("slug", QVariant.Bool), + QgsField("timeseries_id", QVariant.Int), + ) + ttim_attributes = ( + QgsField("timeseries_id", QVariant.Int), + QgsField("time_start", QVariant.Double), + QgsField("discharge", QVariant.Double), + ) + timml_defaults = { + "radius": QgsDefaultValue("0.1"), + "resistance": QgsDefaultValue("0.0"), + "caisson_radius": 
QgsDefaultValue("0.0"), + "slug": QgsDefaultValue("True"), + } + transient_columns = ( + "time_start", + "time_end", + "discharge_transient", + "caisson_radius", + "slug", + "timeseries_id", + ) + + @property + def renderer(self) -> QgsSingleSymbolRenderer: + return self.marker_renderer(color=GREEN, size="3") + + def to_timml(self): + data = self.to_dict(self.timml_layer) + wells = [] + for row in data: + x, y = self.point_xy(row) + wells.append( + { + "xw": x, + "yw": y, + "Q": row["discharge"], + "rw": row["radius"], + "res": row["resistance"], + "layers": row["layer"], + "label": row["label"], + } + ) + return wells + + def to_ttim(self, time_start): + data = self.to_dict(self.timml_layer) + ttim_data = self.table_to_dict(self.ttim_layer) + grouped = self.groupby(ttim_data, "timeseries_id") + wells = [] + for row in data: + x, y = self.point_xy(row) + wells.append( + { + "xw": x, + "yw": y, + "tsandQ": self._transient_input( + row, grouped, "discharge", time_start + ), + "hw": row["head"], + "rw": row["radius"], + "res": row["resistance"], + "layers": row["layer"], + "label": row["label"], + "rc": row["caisson_radius"], + "wbstype": "slug" if row["slug"] else "pumping", + } + ) + return wells diff --git a/plugin/qgistim/core/extractor.py b/plugin/qgistim/core/extractor.py new file mode 100644 index 0000000..8bb1779 --- /dev/null +++ b/plugin/qgistim/core/extractor.py @@ -0,0 +1,66 @@ +import abc +from collections import defaultdict +from typing import Any, Dict + +from qgis.core import NULL + + +def remove_zero_length(geometry): + # This removes repeated vertices + return list(dict.fromkeys(geometry)) + + +class ExtractorMixin(abc.ABC): + @staticmethod + def argsort(seq): + return sorted(range(len(seq)), key=seq.__getitem__) + + @staticmethod + def extract_coordinates(feature): + geometry = feature.geometry() + coordinates = [] + for vertex in geometry.vertices(): + coordinates.append((vertex.x(), vertex.y())) + return coordinates + + @classmethod + def 
to_dict(cls, layer) -> Dict[str, Any]: + features = [] + for feature in layer.getFeatures(): + data = feature.attributeMap() + for key, value in data.items(): + if value == NULL: + data[key] = None + data["geometry"] = cls.extract_coordinates(feature) + features.append(data) + return features + + def table_to_dict(cls, layer) -> Dict[str, Any]: + features = defaultdict(list) + for feature in layer.getFeatures(): + features["geometry"].append(cls.extract_coordinates(feature)) + for key, value in feature.attributeMap().items(): + if value == NULL: + features[key].append(None) + else: + features[key].append(value) + + # Sort by layer if present. + if "layer" in features: + order = cls.argsort(features["layer"]) + return {k: [v[i] for i in order] for k, v in features.items()} + else: + return features + + @staticmethod + def point_xy(row): + point = row["geometry"][0] + return point[0], point[1] + + @staticmethod + def linestring_xy(row): + return remove_zero_length(row["geometry"]) + + @staticmethod + def polygon_xy(row): + return remove_zero_length(row["geometry"]) diff --git a/plugin/qgistim/core/schemata.py b/plugin/qgistim/core/schemata.py new file mode 100644 index 0000000..b4bebd9 --- /dev/null +++ b/plugin/qgistim/core/schemata.py @@ -0,0 +1,321 @@ +import abc +import operator +from typing import Any, Sequence + +OPERATORS = { + "<": operator.lt, + "<=": operator.le, + "==": operator.eq, + "!=": operator.ne, + ">=": operator.ge, + ">": operator.gt, +} + + +def discard_none(data): + return [v for v in data if v is not None] + + +def collect_errors(schemata, data, **kwargs): + errors = [] + for schema in schemata: + try: + schema.validate(data, **kwargs) + except ValidationError as e: + errors.append(str(e)) + + if errors: + raise ValidationError("\n\t".join(errors)) + + +class ValidationError(Exception): + pass + + +class BaseSchema(abc.ABC): + def __init__(self): + pass + + @abc.abstractmethod + def validate(self): + pass + + +# Schemata for a single value + + 
+class Optional(BaseSchema): + def __init__(self, *schemata): + self.schemata = schemata + + def validate(self, data, **kwargs): + if data is None: + return + collect_errors(self.schemata, data, **kwargs) + return + + +class Required(BaseSchema): + def __init__(self, *schemata): + self.schemata = schemata + + def validate(self, data, **kwargs): + if data is None: + raise ValidationError("a value is required.") + collect_errors(self.schemata, data, **kwargs) + return + + +class Positive: + def validate(self, data): + for value in data: + if value is not None and value < 0: + raise ValidationError(f"Non-positive value: {value}") + return + + +class Time(BaseSchema): + def validate(self, data, **kwargs): + start = kwargs["start"] + end = kwargs["end"] + if not (start < data < end): + raise ValidationError( + f"time does not fall in model time window: {start} to {end}" + ) + return + + +class AllOrNone(BaseSchema): + def __init__(self, *variables: Sequence[str]): + self.variables = variables + + def validate(self, data): + present = [data[v] is not None for v in self.variables] + if any(present) != all(present): + raise ValidationError( + "Exactly all or none of the following variables must be " + f"provided: {self.variables}" + ) + return + + +class Xor(BaseSchema): + "One or the other should be provided" + + def __init__(self, x: str, y: str): + self.x = x + self.y = y + + def validate(self, data): + if not ((data[self.x] is None) ^ (data[self.y] is None)): + raise ValidationError(f"Either {self.x} or {self.y} should be provided.") + return + + +class NotBoth(BaseSchema): + def __init__(self, x: str, y: str): + self.x = x + self.y = y + + def validate(self, data): + if (data[self.x] is not None) and (data[self.y] is not None): + raise ValidationError(f"Both {self.x} and {self.y} should not be provided.") + return + + +class Membership(BaseSchema): + def __init__(self, members_key: str): + self.members_key = members_key + + def validate(self, data, **kwargs): + if 
data is None: + return + member_values = kwargs[self.members_key] + if data not in member_values: + raise ValidationError( + f"Value {data} not found in {self.members_key}: {member_values}" + ) + return + + +# Schemata for collections of values + + +class AllRequired(BaseSchema): + def __init__(self, *schemata): + self.schemata = schemata + + def validate(self, data, **kwargs): + missing = [i + 1 for i, v in enumerate(data) if v is not None] + if missing: + raise ValidationError(f"No values provided at rows: {missing}") + + collect_errors(self.schemata, data, **kwargs) + return + + +class OffsetAllRequired(BaseSchema): + def __init__(self, *schemata): + self.schemata = schemata + + def validate(self, data, **kwargs): + missing = [i + 2 for i, v in enumerate(data[1:]) if v is not None] + if missing: + raise ValidationError(f"No values provided at rows: {missing}") + + collect_errors(self.schemata, data, **kwargs) + return + + +class AllOptional(BaseSchema): + def __init__(self, *schemata): + self.schemata = schemata + + def validate(self, data, **kwargs): + missing = [i + 1 for i, v in enumerate(data) if v is not None] + if len(missing) == len(data): + return + + collect_errors(self.schemata, data, **kwargs) + return + + +class Range(BaseSchema): + def validate(self, data): + expected = list(range(len(data))) + if not data == expected: + raise ValidationError(f"Expected {expected}, received: {data}") + return + + +class Increasing(BaseSchema): + def validate(self, data): + data = discard_none(data) + monotonic = all(a <= b for a, b in zip(data, data[1:])) + if not monotonic: + raise ValidationError(f"Values are not increasing: {data}") + return + + +class StrictlyIncreasing(BaseSchema): + def validate(self, data): + data = discard_none(data) + monotonic = all(a < b for a, b in zip(data, data[1:])) + if not monotonic: + raise ValidationError( + f"Values are not strictly increasing (no repeated values): {data}" + ) + return + + +class Decreasing(BaseSchema): + def 
validate(self, data): + data = discard_none(data) + monotonic = all(a >= b for a, b in zip(data, data[1:])) + if not monotonic: + raise ValidationError(f"Values are not decreasing: {data}") + return + + +class StrictlyDecreasing(BaseSchema): + def validate(self, data): + data = discard_none(data) + monotonic = all(a > b for a, b in zip(data, data[1:])) + if not monotonic: + raise ValidationError( + f"Values are not strictly decreasing (no repeated values): {data}" + ) + return + + +class AllGreaterEqual(BaseSchema): + def __init__(self, x, y): + self.x = x + self.y = y + + def validate(self, data): + x = data[self.x] + y = data[self.y] + wrong = [i + 1 for i, (a, b) in zip(x, y) if a < b] + if wrong: + raise ValidationError( + f"{self.x} is not greater or requal to {self.y} at rows: {wrong}" + ) + return + + +class FirstOnly(BaseSchema): + """Exclusively the first value must be provided.""" + + def validate(self, data): + if any(v is not None for v in data[1:]): + raise ValidationError("Only the first value may be filled in.") + return + + +# Global schemata + + +class SemiConfined(BaseSchema): + def validate(self, data): + semiconf_data = { + "aquitard_c": data["aquitard_c"][0], + "semiconf_top": data["semiconf_top"][0], + "semiconf_head": data["semiconf_head"][0], + } + present = [v is not None for v in semiconf_data.values()] + if any(present) != all(present): + variables = ", ".join(semiconf_data.keys()) + raise ValidationError( + "To enable a semi-confined top, the first row must be fully " + f"filled in for {variables}. To disable semi-confined top, none " + f"of the values must be filled in. Found: {semiconf_data}" + ) + return + + +class ValueSchema(BaseSchema, abc.ABC): + """ + Base class for AllValueSchema or AnyValueSchema. 
+ """ + + def __init__( + self, + operator: str, + other: Any, + ): + self.operator = OPERATORS[operator] + self.operator_str = operator + self.other = other + + +class AllValue(ValueSchema): + def validate(self, data, **kwargs): + error = ValidationError( + f"Not all values comply with criterion: {self.operator_str} {self.other}" + ) + if isinstance(self.other, str): + other_obj = kwargs[self.other] + if any(self.operator(a, b) for a, b in zip(data, other_obj)): + raise error + else: + if any(self.operator(v, self.other) for v in data): + raise error + return + + +class InLayer(BaseSchema): + def validate(self, data, **kwargs): + allowed_layers = kwargs["layers"] + for value in data: + if value not in allowed_layers: + raise ValidationError( + f"Layer {value} is not in aquifer layers: {allowed_layers}" + ) + + +class SingleRow(BaseSchema): + def validate(self, data, **kwargs): + if len(data) != 1: + raise ValidationError("Constant may contain only one row.") + super().validate(data, **kwargs) diff --git a/plugin/qgistim/core/tim_elements.py b/plugin/qgistim/core/tim_elements.py index 9362441..7bac796 100644 --- a/plugin/qgistim/core/tim_elements.py +++ b/plugin/qgistim/core/tim_elements.py @@ -55,7 +55,7 @@ import re from collections import defaultdict from functools import partial -from typing import Any, List, Tuple +from typing import Any, Dict, List, Tuple from PyQt5.QtCore import QVariant from PyQt5.QtWidgets import ( @@ -510,6 +510,12 @@ class Well(TransientElement): QgsField("time_start", QVariant.Double), QgsField("discharge", QVariant.Double), ) + timml_defaults = { + "radius": QgsDefaultValue("0.1"), + "resistance": QgsDefaultValue("0.0"), + "caisson_radius": QgsDefaultValue("0.0"), + "slug": QgsDefaultValue("True"), + } transient_columns = ( "time_start", "time_end", @@ -543,6 +549,10 @@ class HeadWell(TransientElement): QgsField("time_start", QVariant.Double), QgsField("head", QVariant.Double), ) + timml_defaults = { + "radius": QgsDefaultValue("0.1"), 
+ "resistance": QgsDefaultValue("0.0"), + } transient_columns = ( "time_start", "time_end", @@ -912,6 +922,7 @@ def parse_name(layername: str) -> Tuple[str, str, str]: "Temporal Settings": "Aquifer", "Polygon Inhomogeneity Properties": "Polygon Inhomogeneity", "Building Pit Properties": "Building Pit", + "Leaky Building Pit Properties": "Building Pit", } element_type = mapping.get(element_type, element_type) if "timml" in prefix: diff --git a/plugin/qgistim/widgets/dataset_widget.py b/plugin/qgistim/widgets/dataset_widget.py index 9e7457f..57242cd 100644 --- a/plugin/qgistim/widgets/dataset_widget.py +++ b/plugin/qgistim/widgets/dataset_widget.py @@ -10,6 +10,7 @@ user chooses the transient simulation mode, a number of elements must be disabled (such as inhomogeneities). """ +import json from pathlib import Path from shutil import copy from typing import List, Set @@ -31,8 +32,8 @@ QWidget, ) from qgis.core import QgsApplication, QgsProject, QgsTask +from qgistim.core.elements import Aquifer, Domain, load_elements_from_geopackage from qgistim.core.task import BaseServerTask -from qgistim.core.tim_elements import Aquifer, Domain, load_elements_from_geopackage SUPPORTED_TTIM_ELEMENTS = set( [ @@ -196,6 +197,16 @@ def remove_geopackage_layers(self) -> None: return + def convert_to_timml(self): + elements = { + item.text(1): item.element + for item in self.items() + if item.timml_checkbox.isChecked() + } + print(elements) + data = {k: element.to_timml() for k, element in elements.items()} + return data + class DatasetWidget(QWidget): def __init__(self, parent): @@ -220,7 +231,7 @@ def __init__(self, parent): self.remove_button.clicked.connect(self.remove_geopackage_layer) self.add_button.clicked.connect(self.add_selection_to_qgis) self.convert_button = QPushButton("Convert GeoPackage to Python script") - self.convert_button.clicked.connect(self.convert) + self.convert_button.clicked.connect(self.convert_to_json) # Layout dataset_layout = QVBoxLayout() dataset_row = 
QHBoxLayout() @@ -433,3 +444,13 @@ def convert(self) -> None: self.parent.set_interpreter_interaction(False) QgsApplication.taskManager().addTask(self.convert_task) return + + def convert_to_json(self) -> None: + outpath, _ = QFileDialog.getSaveFileName(self, "Select file", "", "*.py") + if outpath == "": # Empty string in case of cancel button press + return + + data = self.dataset_tree.convert_to_timml() + json_string = json.dumps(data, indent=4) + print(json_string) + return diff --git a/plugin/qgistim/widgets/elements_widget.py b/plugin/qgistim/widgets/elements_widget.py index 9ac6454..ba24317 100644 --- a/plugin/qgistim/widgets/elements_widget.py +++ b/plugin/qgistim/widgets/elements_widget.py @@ -1,7 +1,7 @@ from functools import partial from PyQt5.QtWidgets import QGridLayout, QPushButton, QVBoxLayout, QWidget -from qgistim.core.tim_elements import ELEMENTS +from qgistim.core.elements import ELEMENTS class ElementsWidget(QWidget): diff --git a/plugin/qgistim/widgets/error_window.py b/plugin/qgistim/widgets/error_window.py new file mode 100644 index 0000000..09548f1 --- /dev/null +++ b/plugin/qgistim/widgets/error_window.py @@ -0,0 +1,18 @@ +from PyQt5.QtCore import Qt +from PyQt5.QtWidgets import QLabel, QScrollArea, QVBoxLayout, QWidget + + +class ErrorWindow(QScrollArea): + """ + A scrollable window for reporting (many) errors. 
+ """ + + def __init__(self, title: str, message: str): + super().__init__() + widget = QWidget() + layout = QVBoxLayout(widget) + layout.setAlignment(Qt.AlignTop) + layout.addWidget(QLabel(message)) + self.setWidget(widget) + self.setWidgetResizable(True) + self.setWindowTitle(title) From ec1526dd368cf17e1056c667cbf75f0f0fbd35c2 Mon Sep 17 00:00:00 2001 From: Huite Bootsma Date: Thu, 7 Sep 2023 18:00:04 +0200 Subject: [PATCH 04/41] Start unit testing --- plugin/qgistim/core/schemata.py | 7 +++-- scripts/run_tests.bat | 14 ++++++++++ test_plugin/init.py | 0 test_plugin/test_schemata.py | 47 +++++++++++++++++++++++++++++++++ 4 files changed, 64 insertions(+), 4 deletions(-) create mode 100644 scripts/run_tests.bat create mode 100644 test_plugin/init.py create mode 100644 test_plugin/test_schemata.py diff --git a/plugin/qgistim/core/schemata.py b/plugin/qgistim/core/schemata.py index b4bebd9..3b964c1 100644 --- a/plugin/qgistim/core/schemata.py +++ b/plugin/qgistim/core/schemata.py @@ -67,10 +67,9 @@ def validate(self, data, **kwargs): class Positive: - def validate(self, data): - for value in data: - if value is not None and value < 0: - raise ValidationError(f"Non-positive value: {value}") + def validate(self, value): + if value < 0: + raise ValidationError(f"Non-positive value: {value}") return diff --git a/scripts/run_tests.bat b/scripts/run_tests.bat new file mode 100644 index 0000000..43bada2 --- /dev/null +++ b/scripts/run_tests.bat @@ -0,0 +1,14 @@ +@echo off +call "c:\Program Files\QGIS 3.30.1\bin\o4w_env.bat" +path %OSGEO4W_ROOT%\apps\qgis\bin;%PATH% +set QGIS_PREFIX_PATH=%OSGEO4W_ROOT:\=/%/apps/qgis +set GDAL_FILENAME_IS_UTF8=YES +rem Set VSI cache to be used as buffer, see #6448 +set VSI_CACHE=TRUE +set VSI_CACHE_SIZE=1000000 +set QT_PLUGIN_PATH=%OSGEO4W_ROOT%\apps\qgis\qtplugins;%OSGEO4W_ROOT%\apps\qt5\plugins +set PYTHONPATH=%OSGEO4W_ROOT%\apps\qgis\python;%PYTHONPATH% +set PYTHONPATH=%CD%\..\plugin;%PYTHONPATH% +@echo on +python -m unittest discover 
--start-directory ..\test_plugin -v +PAUSE diff --git a/test_plugin/init.py b/test_plugin/init.py new file mode 100644 index 0000000..e69de29 diff --git a/test_plugin/test_schemata.py b/test_plugin/test_schemata.py new file mode 100644 index 0000000..65754c1 --- /dev/null +++ b/test_plugin/test_schemata.py @@ -0,0 +1,47 @@ +from unittest import TestCase + +from qgistim.core import schemata +from qgistim.core.schemata import ( + AllOrNone, + Optional, + Positive, + Required, + ValidationError, + Xor, +) + + +class TestDiscardNone(TestCase): + def test_discard(self): + actual = schemata.discard_none([1, 2]) + self.assertTrue(actual == [1, 2]) + + actual = schemata.discard_none([None, 1, None, 2, None]) + self.assertTrue(actual == [1, 2]) + + +class TestPositive(TestCase): + def test_positive(self): + with self.assertRaises(ValidationError): + Positive().validate(-1) + Positive().validate(0) + Positive().validate(1) + + +class TestOptional(TestCase): + def test_optional(self): + Optional(Positive()).validate(None) + with self.assertRaises(ValidationError): + Optional(Positive()).validate(-1) + Optional(Positive()).validate(0) + Optional(Positive()).validate(1) + + +class TestRequired(TestCase): + def test_required(self): + with self.assertRaises(ValidationError): + Required(Positive()).validate(None) + with self.assertRaises(ValidationError): + Required(Positive()).validate(-1) + Required(Positive()).validate(0) + Required(Positive()).validate(1) From 6521207b26e6b152f27f309f9521ccc140d7b55b Mon Sep 17 00:00:00 2001 From: Huite Bootsma Date: Mon, 11 Sep 2023 15:10:36 +0200 Subject: [PATCH 05/41] Tests for schemata --- plugin/qgistim/core/schemata.py | 355 +++++++++++++++----------------- test_plugin/test_schemata.py | 238 +++++++++++++++++++-- 2 files changed, 389 insertions(+), 204 deletions(-) diff --git a/plugin/qgistim/core/schemata.py b/plugin/qgistim/core/schemata.py index 3b964c1..4b011fd 100644 --- a/plugin/qgistim/core/schemata.py +++ 
b/plugin/qgistim/core/schemata.py @@ -1,6 +1,6 @@ import abc import operator -from typing import Any, Sequence +from typing import List, Sequence, Union OPERATORS = { "<": operator.lt, @@ -12,90 +12,156 @@ } -def discard_none(data): - return [v for v in data if v is not None] +MaybeError = Union[None, str] +MaybeErrorList = Union[None, List[str]] -def collect_errors(schemata, data, **kwargs): - errors = [] - for schema in schemata: - try: - schema.validate(data, **kwargs) - except ValidationError as e: - errors.append(str(e)) +def format(data) -> str: + return ", ".join(map(str, data)) - if errors: - raise ValidationError("\n\t".join(errors)) - -class ValidationError(Exception): - pass +def discard_none(data): + return [v for v in data if v is not None] class BaseSchema(abc.ABC): + """Base class for single value.""" + def __init__(self): pass @abc.abstractmethod - def validate(self): + def validate(self, data, other) -> MaybeError: pass + def validate_many(self, data, other) -> MaybeErrorList: + errors = [] + for value in data: + error = self.validate(value, other) + if error is not None: + errors.append(error) + + if errors: + return errors + else: + return None + -# Schemata for a single value +class IterableSchema(BaseSchema, abc.ABC): + """Base class for collection of values.""" + + def validate_many(self, data, other) -> MaybeErrorList: + error = self.validate(data, other) + if error: + if isinstance(error, list): + return error + else: + return [error] + else: + return None -class Optional(BaseSchema): +class SchemaContainer(IterableSchema): def __init__(self, *schemata): self.schemata = schemata - def validate(self, data, **kwargs): - if data is None: - return - collect_errors(self.schemata, data, **kwargs) - return + def _validate_schemata(self, data, other=None) -> MaybeErrorList: + errors = [] + for schema in self.schemata: + if isinstance(data, list): + _errors = schema.validate_many(data, other) + if _errors: + errors.extend(_errors) + else: + _error = 
schema.validate(data, other) + if _error: + errors.append(_error) + if errors: + return errors + + return None -class Required(BaseSchema): - def __init__(self, *schemata): - self.schemata = schemata - def validate(self, data, **kwargs): +# SchemataContainer for a single value. + + +class Optional(SchemaContainer): + def validate(self, data, other=None) -> MaybeError: + if data is None: + return None + return self._validate_schemata(data, other) + + +class Required(SchemaContainer): + def validate(self, data, other=None) -> MaybeError: if data is None: - raise ValidationError("a value is required.") - collect_errors(self.schemata, data, **kwargs) - return + return "a value is required." + return self._validate_schemata(data, other) + + +# SchemataContainer for multiple values. + +class AllRequired(SchemaContainer): + def validate(self, data, other=None) -> MaybeErrorList: + missing = [i + 1 for i, v in enumerate(data) if v is None] + if missing: + return [f"No values provided at rows: {format(missing)}"] + return self._validate_schemata(data, other) + + +class OffsetAllRequired(SchemaContainer): + def validate(self, data, other=None) -> MaybeErrorList: + missing = [i + 2 for i, v in enumerate(data[1:]) if v is None] + if missing: + return [f"No values provided at rows: {format(missing)}"] + if data[0] is None: + return self._validate_schemata(data[1:], other) + else: + return self._validate_schemata(data, other) + + +class AllOptional(SchemaContainer): + def validate(self, data, other=None) -> MaybeErrorList: + missing = [i + 1 for i, v in enumerate(data) if v is None] + if len(missing) == len(data): + return None + return self._validate_schemata(data, other) -class Positive: - def validate(self, value): - if value < 0: - raise ValidationError(f"Non-positive value: {value}") - return + +# Schemata for a single value. 
+ + +class Positive(BaseSchema): + def validate(self, data, _=None) -> MaybeError: + if data < 0: + return f"Non-positive value: {data}" + return None class Time(BaseSchema): - def validate(self, data, **kwargs): - start = kwargs["start"] - end = kwargs["end"] + def validate(self, data, other=None) -> MaybeError: + start = other["start"] + end = other["end"] if not (start < data < end): - raise ValidationError( - f"time does not fall in model time window: {start} to {end}" - ) - return + return f"time does not fall in model time window: {start} to {end}" + return None class AllOrNone(BaseSchema): def __init__(self, *variables: Sequence[str]): self.variables = variables - def validate(self, data): + def validate(self, data, _=None) -> MaybeError: present = [data[v] is not None for v in self.variables] if any(present) != all(present): - raise ValidationError( + vars = ", ".join(self.variables) + return ( "Exactly all or none of the following variables must be " - f"provided: {self.variables}" + f"provided: {vars}" ) - return + return None class Xor(BaseSchema): @@ -105,10 +171,10 @@ def __init__(self, x: str, y: str): self.x = x self.y = y - def validate(self, data): + def validate(self, data, _=None) -> MaybeError: if not ((data[self.x] is None) ^ (data[self.y] is None)): - raise ValidationError(f"Either {self.x} or {self.y} should be provided.") - return + return f"Either {self.x} or {self.y} should be provided, not both." + return None class NotBoth(BaseSchema): @@ -116,147 +182,100 @@ def __init__(self, x: str, y: str): self.x = x self.y = y - def validate(self, data): + def validate(self, data, _=None) -> MaybeError: if (data[self.x] is not None) and (data[self.y] is not None): - raise ValidationError(f"Both {self.x} and {self.y} should not be provided.") - return + return f"Either {self.x} or {self.y} should be provided, not both." 
+ return None class Membership(BaseSchema): def __init__(self, members_key: str): self.members_key = members_key - def validate(self, data, **kwargs): + def validate(self, data, other=None) -> MaybeError: if data is None: - return - member_values = kwargs[self.members_key] + return None + member_values = other[self.members_key] if data not in member_values: - raise ValidationError( - f"Value {data} not found in {self.members_key}: {member_values}" + return ( + f"Value {data} not found in {self.members_key}: {format(member_values)}" ) - return - - -# Schemata for collections of values - - -class AllRequired(BaseSchema): - def __init__(self, *schemata): - self.schemata = schemata - - def validate(self, data, **kwargs): - missing = [i + 1 for i, v in enumerate(data) if v is not None] - if missing: - raise ValidationError(f"No values provided at rows: {missing}") + return None - collect_errors(self.schemata, data, **kwargs) - return +# Schemata for a collection of values. -class OffsetAllRequired(BaseSchema): - def __init__(self, *schemata): - self.schemata = schemata - - def validate(self, data, **kwargs): - missing = [i + 2 for i, v in enumerate(data[1:]) if v is not None] - if missing: - raise ValidationError(f"No values provided at rows: {missing}") - collect_errors(self.schemata, data, **kwargs) - return - - -class AllOptional(BaseSchema): - def __init__(self, *schemata): - self.schemata = schemata - - def validate(self, data, **kwargs): - missing = [i + 1 for i, v in enumerate(data) if v is not None] - if len(missing) == len(data): - return - - collect_errors(self.schemata, data, **kwargs) - return - - -class Range(BaseSchema): - def validate(self, data): +class Range(IterableSchema): + def validate(self, data, _=None) -> MaybeError: expected = list(range(len(data))) if not data == expected: - raise ValidationError(f"Expected {expected}, received: {data}") - return + return f"Expected {format(expected)}; received {format(data)}" + return None -class 
Increasing(BaseSchema): - def validate(self, data): - data = discard_none(data) +class Increasing(IterableSchema): + def validate(self, data, _=None) -> MaybeError: monotonic = all(a <= b for a, b in zip(data, data[1:])) if not monotonic: - raise ValidationError(f"Values are not increasing: {data}") - return + return f"Values are not increasing: {format(data)}" + return None -class StrictlyIncreasing(BaseSchema): - def validate(self, data): - data = discard_none(data) +class StrictlyIncreasing(IterableSchema): + def validate(self, data, _=None) -> MaybeError: monotonic = all(a < b for a, b in zip(data, data[1:])) if not monotonic: - raise ValidationError( - f"Values are not strictly increasing (no repeated values): {data}" - ) - return + return f"Values are not strictly increasing (no repeated values): {format(data)}" + return None -class Decreasing(BaseSchema): - def validate(self, data): - data = discard_none(data) +class Decreasing(IterableSchema): + def validate(self, data, _=None) -> MaybeError: monotonic = all(a >= b for a, b in zip(data, data[1:])) if not monotonic: - raise ValidationError(f"Values are not decreasing: {data}") - return + return f"Values are not decreasing: {format(data)}" + return None -class StrictlyDecreasing(BaseSchema): - def validate(self, data): - data = discard_none(data) +class StrictlyDecreasing(IterableSchema): + def validate(self, data, _=None) -> MaybeError: monotonic = all(a > b for a, b in zip(data, data[1:])) if not monotonic: - raise ValidationError( - f"Values are not strictly decreasing (no repeated values): {data}" - ) - return + return f"Values are not strictly decreasing (no repeated values): {format(data)}" + return None -class AllGreaterEqual(BaseSchema): +class AllGreaterEqual(IterableSchema): def __init__(self, x, y): self.x = x self.y = y - def validate(self, data): + def validate(self, data, _=None) -> MaybeError: x = data[self.x] y = data[self.y] - wrong = [i + 1 for i, (a, b) in zip(x, y) if a < b] + wrong = [i + 
1 for i, (a, b) in enumerate(zip(x, y)) if a < b] if wrong: - raise ValidationError( - f"{self.x} is not greater or requal to {self.y} at rows: {wrong}" + return ( + f"{self.x} is not greater or equal to {self.y} at rows: {format(wrong)}" ) - return + return None -class FirstOnly(BaseSchema): +class FirstOnly(SchemaContainer): """Exclusively the first value must be provided.""" - def validate(self, data): + def validate(self, data, other=None) -> Union[MaybeError, MaybeErrorList]: if any(v is not None for v in data[1:]): - raise ValidationError("Only the first value may be filled in.") - return + return "Only the first value may be filled in." + return self._validate_schemata(data[0], other) -# Global schemata +# Consistency schemata -class SemiConfined(BaseSchema): - def validate(self, data): +class SemiConfined(IterableSchema): + def validate(self, data, other) -> MaybeError: semiconf_data = { "aquitard_c": data["aquitard_c"][0], "semiconf_top": data["semiconf_top"][0], @@ -264,57 +283,19 @@ def validate(self, data): } present = [v is not None for v in semiconf_data.values()] if any(present) != all(present): - variables = ", ".join(semiconf_data.keys()) - raise ValidationError( + variables = format(semiconf_data.keys()) + values = format(semiconf_data.values()) + return ( "To enable a semi-confined top, the first row must be fully " f"filled in for {variables}. To disable semi-confined top, none " - f"of the values must be filled in. Found: {semiconf_data}" + f"of the values must be filled in. Found: {values}" ) - return - - -class ValueSchema(BaseSchema, abc.ABC): - """ - Base class for AllValueSchema or AnyValueSchema. 
- """ - - def __init__( - self, - operator: str, - other: Any, - ): - self.operator = OPERATORS[operator] - self.operator_str = operator - self.other = other - - -class AllValue(ValueSchema): - def validate(self, data, **kwargs): - error = ValidationError( - f"Not all values comply with criterion: {self.operator_str} {self.other}" - ) - if isinstance(self.other, str): - other_obj = kwargs[self.other] - if any(self.operator(a, b) for a, b in zip(data, other_obj)): - raise error - else: - if any(self.operator(v, self.other) for v in data): - raise error - return - - -class InLayer(BaseSchema): - def validate(self, data, **kwargs): - allowed_layers = kwargs["layers"] - for value in data: - if value not in allowed_layers: - raise ValidationError( - f"Layer {value} is not in aquifer layers: {allowed_layers}" - ) + if data["semiconf_top"][0] <= other["aquifer_top"][0]: + return "semiconf_top must be greater than first aquifer_top." + return None -class SingleRow(BaseSchema): - def validate(self, data, **kwargs): +class SingleRow(IterableSchema): + def validate(self, data, other=None) -> MaybeError: if len(data) != 1: - raise ValidationError("Constant may contain only one row.") - super().validate(data, **kwargs) + return "Constant may contain only one row." 
diff --git a/test_plugin/test_schemata.py b/test_plugin/test_schemata.py index 65754c1..507c58a 100644 --- a/test_plugin/test_schemata.py +++ b/test_plugin/test_schemata.py @@ -2,12 +2,23 @@ from qgistim.core import schemata from qgistim.core.schemata import ( + AllGreaterEqual, + AllOptional, AllOrNone, + AllRequired, + Decreasing, + FirstOnly, + Increasing, + Membership, + NotBoth, + OffsetAllRequired, Optional, Positive, + Range, Required, - ValidationError, - Xor, + SemiConfined, + StrictlyDecreasing, + StrictlyIncreasing, ) @@ -22,26 +33,219 @@ def test_discard(self): class TestPositive(TestCase): def test_positive(self): - with self.assertRaises(ValidationError): - Positive().validate(-1) - Positive().validate(0) - Positive().validate(1) + self.assertEqual(Positive().validate(-1), "Non-positive value: -1") + self.assertIsNone(Positive().validate(0)) + self.assertIsNone(Positive().validate(1)) class TestOptional(TestCase): def test_optional(self): - Optional(Positive()).validate(None) - with self.assertRaises(ValidationError): - Optional(Positive()).validate(-1) - Optional(Positive()).validate(0) - Optional(Positive()).validate(1) + self.assertIsNone(Optional(Positive()).validate(None)) + self.assertIsNone(Optional(Positive()).validate(0)) + self.assertIsNone(Optional(Positive()).validate(1)) + self.assertEqual(Optional(Positive()).validate(-1), ["Non-positive value: -1"]) class TestRequired(TestCase): def test_required(self): - with self.assertRaises(ValidationError): - Required(Positive()).validate(None) - with self.assertRaises(ValidationError): - Required(Positive()).validate(-1) - Required(Positive()).validate(0) - Required(Positive()).validate(1) + self.assertEqual(Required(Positive()).validate(None), "a value is required.") + self.assertEqual(Required(Positive()).validate(-1), ["Non-positive value: -1"]) + self.assertIsNone(Required(Positive()).validate(0)) + self.assertIsNone(Required(Positive()).validate(1)) + + +class TestAllOrNone(TestCase): + def 
test_all_or_none(self): + schema = AllOrNone("a", "b", "c") + d = {"a": None, "b": None, "c": None} + self.assertIsNone(schema.validate(d)) + d = {"a": 1, "b": 2, "c": 3} + self.assertIsNone(schema.validate(d)) + d = {"a": None, "b": 2, "c": 3} + expected = ( + "Exactly all or none of the following variables must be provided: a, b, c" + ) + self.assertEqual(schema.validate(d), expected) + + +class TestNotBoth(TestCase): + def test_not_both(self): + schema = NotBoth("a", "b") + d = {"a": None, "b": None} + self.assertIsNone(schema.validate(d)) + d = {"a": 1, "b": None} + self.assertIsNone(schema.validate(d)) + d = {"a": None, "b": 1} + self.assertIsNone(schema.validate(d)) + d = {"a": 1, "b": 1} + self.assertEqual( + schema.validate(d), "Either a or b should be provided, not both." + ) + + +class TestMembership(TestCase): + def test_membership(self): + schema = Membership("model layers") + other = {"model layers": [1, 2, 3]} + self.assertIsNone(schema.validate(None, other)) + self.assertIsNone(schema.validate(1, other)) + self.assertEqual( + schema.validate(0, other), "Value 0 not found in model layers: 1, 2, 3" + ) + + +class TestAllRequired(TestCase): + def test_all_required(self): + schema = AllRequired(Positive()) + self.assertIsNone(schema.validate([1, 2, 3])) + self.assertEqual( + schema.validate([None, 2, None]), ["No values provided at rows: 1, 3"] + ) + self.assertEqual(schema.validate([1, 2, -1]), ["Non-positive value: -1"]) + + +class TestOffsetAllRequired(TestCase): + def test_offset_all_required(self): + schema = OffsetAllRequired(Positive()) + self.assertIsNone(schema.validate([None, 2, 3])) + self.assertEqual( + schema.validate([None, 2, None]), ["No values provided at rows: 3"] + ) + self.assertEqual(schema.validate([None, 2, -1]), ["Non-positive value: -1"]) + + +class TestAllOptional(TestCase): + def test_all_optional(self): + schema = AllOptional(Positive()) + self.assertIsNone(schema.validate([None, None, None])) + 
self.assertIsNone(schema.validate([1, 2, 3])) + self.assertEqual(schema.validate([-1, 2, 3]), ["Non-positive value: -1"]) + + def test_all_optional_first_only(self): + schema = AllOptional(FirstOnly()) + self.assertIsNone(schema.validate([None, None, None])) + self.assertIsNone(schema.validate([1, None, None])) + self.assertEqual( + schema.validate([1, 1, None]), ["Only the first value may be filled in."] + ) + + def test_all_optional_first_only_positive(self): + schema = AllOptional(FirstOnly(Positive())) + self.assertIsNone(schema.validate([None, None, None])) + self.assertIsNone(schema.validate([1, None, None])) + self.assertEqual( + schema.validate([1, 1, None]), ["Only the first value may be filled in."] + ) + self.assertEqual(schema.validate([-1, None, None]), ["Non-positive value: -1"]) + + +class TestRange(TestCase): + def test_range(self): + schema = Range() + self.assertIsNone(schema.validate([0, 1, 2, 3])) + self.assertEqual( + schema.validate([1, 2, 3]), "Expected 0, 1, 2; received 1, 2, 3" + ) + + +class TestIncreasing(TestCase): + def test_increasing(self): + schema = Increasing() + self.assertIsNone(schema.validate([0, 1, 2, 3])) + self.assertIsNone(schema.validate([0, 1, 1, 2, 3])) + self.assertEqual( + schema.validate([1, 0, 2]), "Values are not increasing: 1, 0, 2" + ) + + +class TestStrictlyIncreasing(TestCase): + def test_strictly_increasing(self): + schema = StrictlyIncreasing() + self.assertIsNone(schema.validate([0, 1, 2, 3])) + self.assertEqual( + schema.validate([1, 0, 2]), + "Values are not strictly increasing (no repeated values): 1, 0, 2", + ) + self.assertEqual( + schema.validate([0, 1, 1, 2]), + "Values are not strictly increasing (no repeated values): 0, 1, 1, 2", + ) + + +class TestDecreasing(TestCase): + def test_decreasing(self): + schema = Decreasing() + self.assertIsNone(schema.validate([3, 2, 1, 0])) + self.assertIsNone(schema.validate([3, 2, 2, 1, 1])) + self.assertEqual( + schema.validate([1, 0, 2]), "Values are not 
decreasing: 1, 0, 2" + ) + + +class TestStrictlyDecreasing(TestCase): + def test_strictly_decreasing(self): + schema = StrictlyDecreasing() + self.assertIsNone(schema.validate([3, 2, 1, 0])) + self.assertEqual( + schema.validate([1, 0, 2]), + "Values are not strictly decreasing (no repeated values): 1, 0, 2", + ) + self.assertEqual( + schema.validate([2, 1, 1, 0]), + "Values are not strictly decreasing (no repeated values): 2, 1, 1, 0", + ) + + +class TestAllGreateEqual(TestCase): + def test_all_greater_equal(self): + schema = AllGreaterEqual("top", "bot") + d = {"top": [1.0, 0.0], "bot": [0.0, -1.0]} + self.assertIsNone(schema.validate(d)) + d = {"top": [0.0, -1.0], "bot": [0.0, -1.0]} + self.assertIsNone(schema.validate(d)) + d = {"bot": [1.0, 0.0], "top": [0.0, -1.0]} + expected = "top is not greater or equal to bot at rows: 1, 2" + self.assertEqual(schema.validate(d), expected) + + +class TestFirstOnly(TestCase): + def test_first_only(self): + schema = FirstOnly() + self.assertIsNone(schema.validate([None, None])) + self.assertIsNone(schema.validate([1, None])) + self.assertEqual( + schema.validate([1, 1]), "Only the first value may be filled in." + ) + + +class TestSemiConfined(TestCase): + def test_semi_confined(self): + schema = SemiConfined() + other = {"aquifer_top": [0.0, 1.0]} + d = { + "aquitard_c": [1.0, None], + "semiconf_top": [1.0, None], + "semiconf_head": [1.0, None], + } + self.assertIsNone(schema.validate(d, other)) + d = { + "aquitard_c": [None, None], + "semiconf_top": [1.0, None], + "semiconf_head": [1.0, None], + } + expected = ( + "To enable a semi-confined top, the first row must be fully " + "filled in for aquitard_c, semiconf_top, semiconf_head. To disable semi-confined top, none " + "of the values must be filled in. 
Found: None, 1.0, 1.0" + ) + self.assertEqual(schema.validate(d, other), expected) + + d = { + "aquitard_c": [1.0, None], + "semiconf_top": [-1.0, None], + "semiconf_head": [1.0, None], + } + self.assertEqual( + schema.validate(d, other), + "semiconf_top must be greater than first aquifer_top.", + ) From 65a4ad5f19519aa382fe8d87ee8fd750ac587671 Mon Sep 17 00:00:00 2001 From: Huite Bootsma Date: Mon, 11 Sep 2023 20:48:32 +0200 Subject: [PATCH 06/41] WIP: show errors in HTML --- plugin/qgistim/core/elements/aquifer.py | 58 +++++++------- .../core/elements/circular_area_sink.py | 2 +- plugin/qgistim/core/elements/constant.py | 2 +- plugin/qgistim/core/elements/domain.py | 28 ++++--- plugin/qgistim/core/elements/element.py | 76 ++++++++----------- .../qgistim/core/elements/head_line_sink.py | 2 +- plugin/qgistim/core/elements/headwell.py | 2 +- plugin/qgistim/core/elements/well.py | 2 +- plugin/qgistim/core/schemata.py | 16 ++-- plugin/qgistim/widgets/dataset_widget.py | 24 +++++- plugin/qgistim/widgets/error_window.py | 47 ++++++++---- test_plugin/test_aquifer_schemata.py | 68 +++++++++++++++++ test_plugin/test_schemata.py | 16 ++-- 13 files changed, 225 insertions(+), 118 deletions(-) create mode 100644 test_plugin/test_aquifer_schemata.py diff --git a/plugin/qgistim/core/elements/aquifer.py b/plugin/qgistim/core/elements/aquifer.py index 3ffae7b..35079d3 100644 --- a/plugin/qgistim/core/elements/aquifer.py +++ b/plugin/qgistim/core/elements/aquifer.py @@ -1,11 +1,7 @@ -from collections import defaultdict -from dataclasses import dataclass -from typing import Any, Dict, List - from PyQt5.QtCore import QVariant from qgis.core import QgsDefaultValue, QgsField from qgistim.core import geopackage -from qgistim.core.elements.element import TransientElement +from qgistim.core.elements.element import ElementSchema, TransientElement from qgistim.core.schemata import ( AllGreaterEqual, AllOptional, @@ -19,6 +15,26 @@ ) +class AquiferSchema(ElementSchema): + timml_schemata = { 
+ "layer": AllRequired(Range()), + "aquifer_top": AllRequired(StrictlyDecreasing()), + "aquifer_bottom": AllRequired(StrictlyDecreasing()), + "aquitard_c": OffsetAllRequired(Positive()), + "aquifer_k": AllRequired(Positive()), + "semiconf_top": AllOptional(FirstOnly()), + "semiconf_head": AllOptional(FirstOnly()), + } + timml_consistency_schemata = ( + SemiConfined(), + AllGreaterEqual("aquifer_top", "aquifer_bottom"), + ) + ttim_schemata = { + "aquitard_s": OffsetAllRequired(Positive()), + "aquifer_s": AllRequired(Positive()), + } + + class Aquifer(TransientElement): element_type = "Aquifer" geometry_type = "No Geometry" @@ -54,24 +70,7 @@ class Aquifer(TransientElement): "aquitard_npor", "aquifer_npor", ) - - timml_schemata = { - "layer": AllRequired(Range()), - "aquifer_top": AllRequired(StrictlyDecreasing()), - "aquifer_bottom": AllRequired(StrictlyDecreasing()), - "aquifer_k": AllRequired(Positive()), - "aquitard_c": OffsetAllRequired(Positive()), - "semiconf_top": AllOptional(FirstOnly()), - "semiconf_head": AllOptional(FirstOnly()), - } - timml_global_schemata = ( - SemiConfined(), - AllGreaterEqual("aquifer_top", "aquifer_bottom"), - ) - ttim_schemata = { - "aquitard_s": OffsetAllRequired(Positive()), - "aquifer_s": AllRequired(Positive()), - } + schema = AquiferSchema() def __init__(self, path: str, name: str): self._initialize_default(path, name) @@ -93,9 +92,16 @@ def remove_from_geopackage(self): def to_timml(self): data = self.table_to_dict(self.timml_layer) - self.validate_timml(data) - return self.aquifer_data(data, transient=False) + errors = self.schema.validate_timml(data) + if errors: + return errors, None + else: + return None, self.aquifer_data(data, transient=False) def to_ttim(self): data = self.table_to_dict(self.timml_layer) - return self.aquifer_data(data, transient=True) + errors = self.schema.validate_ttim(data) + if errors: + return errors, None + else: + return None, self.aquifer_data(data, transient=False) diff --git 
a/plugin/qgistim/core/elements/circular_area_sink.py b/plugin/qgistim/core/elements/circular_area_sink.py index cebeb36..e8674f7 100644 --- a/plugin/qgistim/core/elements/circular_area_sink.py +++ b/plugin/qgistim/core/elements/circular_area_sink.py @@ -6,7 +6,7 @@ class CircularAreaSinkSchema: class TransientCircularAreaSinkSchema: - global_schemata = ( + consistency_schemata = ( AllOrNone("time_start", "time_end", "rate_transient"), NotBoth("time_start", "timeseries_id"), ) diff --git a/plugin/qgistim/core/elements/constant.py b/plugin/qgistim/core/elements/constant.py index 635e3e5..133c7bd 100644 --- a/plugin/qgistim/core/elements/constant.py +++ b/plugin/qgistim/core/elements/constant.py @@ -6,7 +6,7 @@ class ConstantSchema(ElementSchema): - global_schemata = (SingleRow(),) + consistency_schemata = (SingleRow(),) schemata = { "head": Required(), "layer": Membership("layers"), diff --git a/plugin/qgistim/core/elements/domain.py b/plugin/qgistim/core/elements/domain.py index 3dc8e2b..fe49b5d 100644 --- a/plugin/qgistim/core/elements/domain.py +++ b/plugin/qgistim/core/elements/domain.py @@ -9,13 +9,19 @@ QgsSingleSymbolRenderer, ) from qgistim.core.elements.colors import BLACK -from qgistim.core.elements.element import TransientElement +from qgistim.core.elements.element import ElementSchema, TransientElement +from qgistim.core.schemata import SingleRow + + +class DomainSchema(ElementSchema): + timml_consistency_schemata = (SingleRow,) class Domain(TransientElement): element_type = "Domain" geometry_type = "Polygon" ttim_attributes = (QgsField("time", QVariant.Double),) + schema = DomainSchema() def __init__(self, path: str, name: str): self._initialize_default(path, name) @@ -57,14 +63,18 @@ def update_extent(self, iface: Any) -> Tuple[float, float]: def to_timml(self): data = self.to_dict(self.timml_layer) - x = [point[0] for point in data[0]["geometry"]] - y = [point[1] for point in data[0]["geometry"]] - return { - "xmin": min(x), - "xmax": max(x), - "ymin": 
min(y), - "ymax": max(y), - } + errors = self.schema.validate_timml(data) + if errors: + return errors, None + else: + x = [point[0] for point in data[0]["geometry"]] + y = [point[1] for point in data[0]["geometry"]] + return None, { + "xmin": min(x), + "xmax": max(x), + "ymin": min(y), + "ymax": max(y), + } def to_ttim(self): data = self.to_timml() diff --git a/plugin/qgistim/core/elements/element.py b/plugin/qgistim/core/elements/element.py index 81ad441..9256065 100644 --- a/plugin/qgistim/core/elements/element.py +++ b/plugin/qgistim/core/elements/element.py @@ -53,7 +53,7 @@ import abc from collections import defaultdict -from typing import Any, List +from typing import Any, Dict, List from PyQt5.QtWidgets import ( QDialog, @@ -72,14 +72,37 @@ ) from qgistim.core import geopackage from qgistim.core.extractor import ExtractorMixin -from qgistim.core.schemata import ValidationError class ElementSchema(abc.ABC): - def validate(self, data, **kwargs): - for key, value in self.schemata.items(): - value.validate(data[key], **kwargs) - return + timml_schemata = {} + timml_consistency_schemata = {} + ttim_schemata = {} + ttim_consistency_schemata = {} + + def validate_timml(self, data, other=None) -> Dict[str, List]: + if len(data) == 0: + return {"table": ["Table is empty."]} + + errors = defaultdict(list) + for variable, schema in self.timml_schemata.items(): + _errors = schema.validate(data[variable], other) + if _errors is not None: + errors[variable].extend(_errors) + + if not errors: + for schema in self.timml_consistency_schemata: + _error = schema.validate(data, other) + if _error: + errors["table"].append(_error) + + if errors: + return errors + else: + return None + + def validate_ttim(self, data, other=None) -> Dict[str, List]: + raise NotImplementedError class NameDialog(QDialog): @@ -252,29 +275,8 @@ def on_transient_changed(self, transient: bool): return @abc.abstractmethod - def to_timml(self, data, **kwargs): - """ """ - - def validate(self, data, 
**kwargs): - errors = [] - for schema in self.global_schemata: - try: - schema.validate(data, **kwargs) - except ValidationError as e: - errors.append(str(e)) - - for variable, schema in self.schemata: - variable_errors = [] - try: - schema.validate(data, **kwargs) - except ValidationError as e: - variable_errors.append(str(e)) - - if variable_errors: - errors.append(f"{variable:}\n") - errors.append(variable_errors) - - return errors + def to_timml(self, other=None): + pass @staticmethod def transient_input(row, all_timeseries, variable: str, time_start): @@ -354,22 +356,6 @@ def interleave(a, b): return aquifer - def validate_timml(self, data): - errors = defaultdict(list) - for variable, schema in self.timml_schemata.items(): - try: - schema.validate(data) - except ValidationError as e: - errors[variable].append(e) - - for schema in self.timml_global_schemata: - try: - schema.validate(data) - except ValidationError as e: - errors[""].append(e) - - return errors - class TransientElement(Element): """ diff --git a/plugin/qgistim/core/elements/head_line_sink.py b/plugin/qgistim/core/elements/head_line_sink.py index c5aa1dd..059f4a7 100644 --- a/plugin/qgistim/core/elements/head_line_sink.py +++ b/plugin/qgistim/core/elements/head_line_sink.py @@ -24,7 +24,7 @@ class HeadLineSinkSchema: class TransientHeadLineSinkSchema: - global_schemata = ( + consistency_schemata = ( AllOrNone("time_start", "time_end", "head_transient"), NotBoth("time_start", "timeseries_id"), Time(), diff --git a/plugin/qgistim/core/elements/headwell.py b/plugin/qgistim/core/elements/headwell.py index ca77a0b..72b2050 100644 --- a/plugin/qgistim/core/elements/headwell.py +++ b/plugin/qgistim/core/elements/headwell.py @@ -11,7 +11,7 @@ class TransientHeadWellSchema: schemata = { "timeseries_id": [MembershipSchema("timeseries_ids")], } - global_schemata = ( + consistency_schemata = ( AllOrNoneSchema(("time_start", "time_end", "head_transient")), XorSchema("time_start", "timeseries_id"), ) diff 
--git a/plugin/qgistim/core/elements/well.py b/plugin/qgistim/core/elements/well.py index 5663d48..5fe8d05 100644 --- a/plugin/qgistim/core/elements/well.py +++ b/plugin/qgistim/core/elements/well.py @@ -30,7 +30,7 @@ class TransientWellSchema: "time_end": Optional(Time()), "timeseries_id": Optional(Membership("timeseries_ids")), } - global_schemata = ( + consistency_schemata = ( AllOrNone(("time_start", "time_end", "discharge_transient")), NotBoth("time_start", "timeseries_id"), ) diff --git a/plugin/qgistim/core/schemata.py b/plugin/qgistim/core/schemata.py index 4b011fd..739da68 100644 --- a/plugin/qgistim/core/schemata.py +++ b/plugin/qgistim/core/schemata.py @@ -107,7 +107,7 @@ class AllRequired(SchemaContainer): def validate(self, data, other=None) -> MaybeErrorList: missing = [i + 1 for i, v in enumerate(data) if v is None] if missing: - return [f"No values provided at rows: {format(missing)}"] + return [f"No values provided at row(s): {format(missing)}"] return self._validate_schemata(data, other) @@ -115,7 +115,7 @@ class OffsetAllRequired(SchemaContainer): def validate(self, data, other=None) -> MaybeErrorList: missing = [i + 2 for i, v in enumerate(data[1:]) if v is None] if missing: - return [f"No values provided at rows: {format(missing)}"] + return [f"No values provided at row(s): {format(missing)}"] if data[0] is None: return self._validate_schemata(data[1:], other) else: @@ -256,9 +256,7 @@ def validate(self, data, _=None) -> MaybeError: y = data[self.y] wrong = [i + 1 for i, (a, b) in enumerate(zip(x, y)) if a < b] if wrong: - return ( - f"{self.x} is not greater or equal to {self.y} at rows: {format(wrong)}" - ) + return f"{self.x} is not greater or equal to {self.y} at row(s): {format(wrong)}" return None @@ -275,7 +273,7 @@ def validate(self, data, other=None) -> Union[MaybeError, MaybeErrorList]: class SemiConfined(IterableSchema): - def validate(self, data, other) -> MaybeError: + def validate(self, data, _=None) -> MaybeError: semiconf_data = 
{ "aquitard_c": data["aquitard_c"][0], "semiconf_top": data["semiconf_top"][0], @@ -290,7 +288,8 @@ def validate(self, data, other) -> MaybeError: f"filled in for {variables}. To disable semi-confined top, none " f"of the values must be filled in. Found: {values}" ) - if data["semiconf_top"][0] <= other["aquifer_top"][0]: + semitop = data["semiconf_top"][0] + if semitop is not None and semitop <= data["aquifer_top"][0]: return "semiconf_top must be greater than first aquifer_top." return None @@ -298,4 +297,5 @@ def validate(self, data, other) -> MaybeError: class SingleRow(IterableSchema): def validate(self, data, other=None) -> MaybeError: if len(data) != 1: - return "Constant may contain only one row." + return "Table may contain only one row." + return None diff --git a/plugin/qgistim/widgets/dataset_widget.py b/plugin/qgistim/widgets/dataset_widget.py index 57242cd..b4e9851 100644 --- a/plugin/qgistim/widgets/dataset_widget.py +++ b/plugin/qgistim/widgets/dataset_widget.py @@ -34,6 +34,7 @@ from qgis.core import QgsApplication, QgsProject, QgsTask from qgistim.core.elements import Aquifer, Domain, load_elements_from_geopackage from qgistim.core.task import BaseServerTask +from qgistim.widgets.error_window import ValidationDialog SUPPORTED_TTIM_ELEMENTS = set( [ @@ -203,9 +204,15 @@ def convert_to_timml(self): for item in self.items() if item.timml_checkbox.isChecked() } - print(elements) - data = {k: element.to_timml() for k, element in elements.items()} - return data + data = {} + errors = {} + for name, element in elements.items(): + _errors, _data = element.to_timml() + if _errors: + errors[name] = _errors + else: + data[name] = _data + return errors, data class DatasetWidget(QWidget): @@ -248,6 +255,7 @@ def __init__(self, parent): dataset_layout.addLayout(layer_row) dataset_layout.addWidget(self.convert_button) self.setLayout(dataset_layout) + self.validation_dialog = None @property def path(self) -> str: @@ -446,11 +454,19 @@ def convert(self) -> None: 
return def convert_to_json(self) -> None: + if self.validation_dialog: + self.validation_dialog.close() + self.validation_dialog = None + outpath, _ = QFileDialog.getSaveFileName(self, "Select file", "", "*.py") if outpath == "": # Empty string in case of cancel button press return - data = self.dataset_tree.convert_to_timml() + errors, data = self.dataset_tree.convert_to_timml() + if errors: + self.validation_dialog = ValidationDialog(errors) + return + json_string = json.dumps(data, indent=4) print(json_string) return diff --git a/plugin/qgistim/widgets/error_window.py b/plugin/qgistim/widgets/error_window.py index 09548f1..6cf94e8 100644 --- a/plugin/qgistim/widgets/error_window.py +++ b/plugin/qgistim/widgets/error_window.py @@ -1,18 +1,37 @@ -from PyQt5.QtCore import Qt -from PyQt5.QtWidgets import QLabel, QScrollArea, QVBoxLayout, QWidget +from typing import Any, Dict +from PyQt5.QtWidgets import QDialog, QHBoxLayout, QPushButton, QTextBrowser, QVBoxLayout -class ErrorWindow(QScrollArea): - """ - A scrollable window for reporting (many) errors. - """ - def __init__(self, title: str, message: str): +def format_errors(errors: Dict[str, Dict[str, Any]]): + messages = [] + for element, element_errors in errors.items(): + messages.append(f"{element}") + for variable, var_errors in element_errors.items(): + messages.append("
    ") + messages.extend(f"
  • {variable}: {error}
  • " for error in var_errors) + messages.append("
") + return "".join(messages) + + +class ValidationDialog(QDialog): + def __init__(self, errors: Dict[str, Dict[str, Any]]): super().__init__() - widget = QWidget() - layout = QVBoxLayout(widget) - layout.setAlignment(Qt.AlignTop) - layout.addWidget(QLabel(message)) - self.setWidget(widget) - self.setWidgetResizable(True) - self.setWindowTitle(title) + self.cancel_button = QPushButton("Close") + self.cancel_button.clicked.connect(self.reject) + self.textbox = QTextBrowser() + self.textbox.setReadOnly(True) + self.textbox.setHtml(format_errors(errors)) + first_row = QHBoxLayout() + first_row.addWidget(self.textbox) + second_row = QHBoxLayout() + second_row.addStretch() + second_row.addWidget(self.cancel_button) + layout = QVBoxLayout() + layout.addLayout(first_row) + layout.addLayout(second_row) + self.setLayout(layout) + self.setWindowTitle("Invalid model input") + self.textbox.setMinimumWidth(500) + self.textbox.setMinimumHeight(500) + self.show() diff --git a/test_plugin/test_aquifer_schemata.py b/test_plugin/test_aquifer_schemata.py new file mode 100644 index 0000000..44fddb1 --- /dev/null +++ b/test_plugin/test_aquifer_schemata.py @@ -0,0 +1,68 @@ +from unittest import TestCase + +from qgistim.core.elements.aquifer import AquiferSchema + + +class TestAquiferSchema(TestCase): + def test_validate(self): + schema = AquiferSchema() + data = { + "layer": [0], + "aquifer_top": [10.0], + "aquifer_bottom": [0.0], + "aquitard_c": [None], + "aquifer_k": [5.0], + "semiconf_top": [None], + "semiconf_head": [None], + } + self.assertIsNone(schema.validate_timml(data)) + + def test_validate_empty(self): + schema = AquiferSchema() + data = {} + self.assertEqual(schema.validate_timml(data), {"table": ["Table is empty."]}) + + def test_validate_two_layer(self): + schema = AquiferSchema() + data = { + "layer": [0, 1], + "aquifer_top": [10.0, -5.0], + "aquifer_bottom": [0.0, -15.0], + "aquitard_c": [None, 100.0], + "aquifer_k": [5.0, 10.0], + "semiconf_top": [None], + 
"semiconf_head": [None], + } + self.assertIsNone(schema.validate_timml(data)) + + def test_validate_two_layer_invalid(self): + schema = AquiferSchema() + data = { + "layer": [0, 1], + "aquifer_top": [10.0, -5.0], + "aquifer_bottom": [0.0, -15.0], + "aquitard_c": [None, None], + "aquifer_k": [5.0, 10.0], + "semiconf_top": [None], + "semiconf_head": [None], + } + expected = {"aquitard_c": ["No values provided at row(s): 2"]} + self.assertEqual(schema.validate_timml(data), expected) + + def test_validate_two_layer_consistency(self): + schema = AquiferSchema() + data = { + "layer": [0, 1], + "aquifer_top": [9.0, -15.0], + "aquifer_bottom": [10.0, -5.0], + "aquitard_c": [None, 10.0], + "aquifer_k": [5.0, 10.0], + "semiconf_top": [None], + "semiconf_head": [None], + } + expected = { + "table": [ + "aquifer_top is not greater or equal to aquifer_bottom at row(s): 1, 2" + ] + } + self.assertEqual(schema.validate_timml(data), expected) diff --git a/test_plugin/test_schemata.py b/test_plugin/test_schemata.py index 507c58a..9b0eb06 100644 --- a/test_plugin/test_schemata.py +++ b/test_plugin/test_schemata.py @@ -99,7 +99,7 @@ def test_all_required(self): schema = AllRequired(Positive()) self.assertIsNone(schema.validate([1, 2, 3])) self.assertEqual( - schema.validate([None, 2, None]), ["No values provided at rows: 1, 3"] + schema.validate([None, 2, None]), ["No values provided at row(s): 1, 3"] ) self.assertEqual(schema.validate([1, 2, -1]), ["Non-positive value: -1"]) @@ -109,7 +109,7 @@ def test_offset_all_required(self): schema = OffsetAllRequired(Positive()) self.assertIsNone(schema.validate([None, 2, 3])) self.assertEqual( - schema.validate([None, 2, None]), ["No values provided at rows: 3"] + schema.validate([None, 2, None]), ["No values provided at row(s): 3"] ) self.assertEqual(schema.validate([None, 2, -1]), ["Non-positive value: -1"]) @@ -204,7 +204,7 @@ def test_all_greater_equal(self): d = {"top": [0.0, -1.0], "bot": [0.0, -1.0]} 
self.assertIsNone(schema.validate(d)) d = {"bot": [1.0, 0.0], "top": [0.0, -1.0]} - expected = "top is not greater or equal to bot at rows: 1, 2" + expected = "top is not greater or equal to bot at row(s): 1, 2" self.assertEqual(schema.validate(d), expected) @@ -221,14 +221,15 @@ def test_first_only(self): class TestSemiConfined(TestCase): def test_semi_confined(self): schema = SemiConfined() - other = {"aquifer_top": [0.0, 1.0]} d = { + "aquifer_top": [0.0, 1.0], "aquitard_c": [1.0, None], "semiconf_top": [1.0, None], "semiconf_head": [1.0, None], } - self.assertIsNone(schema.validate(d, other)) + self.assertIsNone(schema.validate(d)) d = { + "aquifer_top": [0.0, 1.0], "aquitard_c": [None, None], "semiconf_top": [1.0, None], "semiconf_head": [1.0, None], @@ -238,14 +239,15 @@ def test_semi_confined(self): "filled in for aquitard_c, semiconf_top, semiconf_head. To disable semi-confined top, none " "of the values must be filled in. Found: None, 1.0, 1.0" ) - self.assertEqual(schema.validate(d, other), expected) + self.assertEqual(schema.validate(d), expected) d = { + "aquifer_top": [0.0, 1.0], "aquitard_c": [1.0, None], "semiconf_top": [-1.0, None], "semiconf_head": [1.0, None], } self.assertEqual( - schema.validate(d, other), + schema.validate(d), "semiconf_top must be greater than first aquifer_top.", ) From 217fa88c7911f867fe4bc20e31a44b61152da7be Mon Sep 17 00:00:00 2001 From: Huite Bootsma Date: Thu, 14 Sep 2023 11:41:52 +0200 Subject: [PATCH 07/41] WIP --- plugin/qgistim/core/elements/__init__.py | 12 ++ plugin/qgistim/core/elements/aquifer.py | 10 +- .../core/elements/circular_area_sink.py | 2 +- plugin/qgistim/core/elements/constant.py | 24 +-- plugin/qgistim/core/elements/domain.py | 9 +- plugin/qgistim/core/elements/element.py | 69 +++++-- .../qgistim/core/elements/head_line_sink.py | 42 ++--- plugin/qgistim/core/elements/headwell.py | 75 ++++++-- .../core/elements/impermeable_line_doublet.py | 38 ++++ .../core/elements/leaky_line_doublet.py | 41 +++++ 
.../qgistim/core/elements/line_sink_ditch.py | 79 ++++++++ plugin/qgistim/core/elements/observation.py | 30 +-- .../core/elements/polygon_inhomogeneity.py | 70 +++++-- plugin/qgistim/core/elements/uniform_flow.py | 21 ++- plugin/qgistim/core/elements/well.py | 49 +++-- plugin/qgistim/core/extractor.py | 14 +- plugin/qgistim/core/formatting.py | 173 ++++++++++++++++++ plugin/qgistim/core/schemata.py | 2 +- plugin/qgistim/widgets/dataset_widget.py | 27 ++- plugin/qgistim/widgets/error_window.py | 20 +- test_plugin/test_schemata.py | 9 + 21 files changed, 663 insertions(+), 153 deletions(-) create mode 100644 plugin/qgistim/core/formatting.py diff --git a/plugin/qgistim/core/elements/__init__.py b/plugin/qgistim/core/elements/__init__.py index 26b054f..7ec268f 100644 --- a/plugin/qgistim/core/elements/__init__.py +++ b/plugin/qgistim/core/elements/__init__.py @@ -8,6 +8,12 @@ from qgistim.core.elements.constant import Constant from qgistim.core.elements.domain import Domain from qgistim.core.elements.element import Element +from qgistim.core.elements.head_line_sink import HeadLineSink +from qgistim.core.elements.headwell import HeadWell +from qgistim.core.elements.impermeable_line_doublet import ImpermeableLineDoublet +from qgistim.core.elements.leaky_line_doublet import LeakyLineDoublet +from qgistim.core.elements.line_sink_ditch import LineSinkDitch +from qgistim.core.elements.observation import Observation from qgistim.core.elements.uniform_flow import UniformFlow from qgistim.core.elements.well import Well @@ -19,6 +25,12 @@ Constant, UniformFlow, Well, + HeadWell, + HeadLineSink, + LineSinkDitch, + LeakyLineDoublet, + ImpermeableLineDoublet, + Observation, ) } diff --git a/plugin/qgistim/core/elements/aquifer.py b/plugin/qgistim/core/elements/aquifer.py index 35079d3..b28c51e 100644 --- a/plugin/qgistim/core/elements/aquifer.py +++ b/plugin/qgistim/core/elements/aquifer.py @@ -93,15 +93,9 @@ def remove_from_geopackage(self): def to_timml(self): data = 
self.table_to_dict(self.timml_layer) errors = self.schema.validate_timml(data) - if errors: - return errors, None - else: - return None, self.aquifer_data(data, transient=False) + return errors, data def to_ttim(self): data = self.table_to_dict(self.timml_layer) errors = self.schema.validate_ttim(data) - if errors: - return errors, None - else: - return None, self.aquifer_data(data, transient=False) + return errors, data diff --git a/plugin/qgistim/core/elements/circular_area_sink.py b/plugin/qgistim/core/elements/circular_area_sink.py index e8674f7..bb5d1b8 100644 --- a/plugin/qgistim/core/elements/circular_area_sink.py +++ b/plugin/qgistim/core/elements/circular_area_sink.py @@ -1,7 +1,7 @@ class CircularAreaSinkSchema: schemata = { "rate": Required(), - "layer": Required(Membership("layers")), + "layer": Required(Membership("aquifer layers")), } diff --git a/plugin/qgistim/core/elements/constant.py b/plugin/qgistim/core/elements/constant.py index 133c7bd..423993a 100644 --- a/plugin/qgistim/core/elements/constant.py +++ b/plugin/qgistim/core/elements/constant.py @@ -6,10 +6,11 @@ class ConstantSchema(ElementSchema): - consistency_schemata = (SingleRow(),) - schemata = { + timml_consistency_schemata = (SingleRow(),) + timml_schemata = { + "geometry": Required(), "head": Required(), - "layer": Membership("layers"), + "layer": Required(Membership("aquifer layers")), } @@ -21,19 +22,18 @@ class Constant(Element): QgsField("layer", QVariant.Int), QgsField("label", QVariant.String), ) + schema = ConstantSchema() @property def renderer(self) -> QgsSingleSymbolRenderer: return self.marker_renderer(color=RED, name="star", size="5") - def to_timml(self): - data = self.to_dict() - # TODO: validate - data = data[0] + def process_timml_row(self, row): + x, y = self.point_xy(row) return { - "xr": data["geometry"][0][0], - "yr": data["geometry"][0][1], - "hr": data["head"], - "layer": data["layer"], - "label": data["label"], + "xr": x, + "yr": y, + "hr": row["head"], + 
"layer": row["layer"], + "label": row["label"], } diff --git a/plugin/qgistim/core/elements/domain.py b/plugin/qgistim/core/elements/domain.py index fe49b5d..f362b9f 100644 --- a/plugin/qgistim/core/elements/domain.py +++ b/plugin/qgistim/core/elements/domain.py @@ -10,11 +10,12 @@ ) from qgistim.core.elements.colors import BLACK from qgistim.core.elements.element import ElementSchema, TransientElement -from qgistim.core.schemata import SingleRow +from qgistim.core.schemata import Required, SingleRow class DomainSchema(ElementSchema): - timml_consistency_schemata = (SingleRow,) + timml_schemata = {"geometry": Required()} + timml_consistency_schemata = (SingleRow(),) class Domain(TransientElement): @@ -61,9 +62,9 @@ def update_extent(self, iface: Any) -> Tuple[float, float]: canvas.refresh() return ymax, ymin - def to_timml(self): + def to_timml(self, other): data = self.to_dict(self.timml_layer) - errors = self.schema.validate_timml(data) + errors = self.schema.validate_timml(data, other) if errors: return errors, None else: diff --git a/plugin/qgistim/core/elements/element.py b/plugin/qgistim/core/elements/element.py index 9256065..1e95149 100644 --- a/plugin/qgistim/core/elements/element.py +++ b/plugin/qgistim/core/elements/element.py @@ -53,7 +53,7 @@ import abc from collections import defaultdict -from typing import Any, Dict, List +from typing import Any, Dict, List, Tuple from PyQt5.QtWidgets import ( QDialog, @@ -75,14 +75,40 @@ class ElementSchema(abc.ABC): - timml_schemata = {} - timml_consistency_schemata = {} - ttim_schemata = {} - ttim_consistency_schemata = {} + timml_schemata: Dict[str, Any] = {} + timml_consistency_schemata: Tuple[Any] = () + ttim_schemata: Dict[str, Any] = {} + ttim_consistency_schemata: Tuple[Any] = () + + def _validate_timml_rows(self, data, other=None): + errors = {} + + _errors = [] + for schema in self.timml_consistency_schemata: + _error = schema.validate(data, other) + if _error is not None: + _errors.append(_error) + if 
_errors: + errors["Table:"] = _errors + + for i, row in enumerate(data): + row_errors = defaultdict(list) + for variable, schema in self.timml_schemata.items(): + _error = schema.validate(row[variable], other) + if _error is not None: + row_errors[variable].append(_error) + + if row_errors: + errors[f"Row {i + 1}:"] = row_errors - def validate_timml(self, data, other=None) -> Dict[str, List]: + if errors: + return errors + else: + return None + + def _validate_timml_table(self, data, other=None): if len(data) == 0: - return {"table": ["Table is empty."]} + return {"Table:": ["Table is empty."]} errors = defaultdict(list) for variable, schema in self.timml_schemata.items(): @@ -90,17 +116,29 @@ def validate_timml(self, data, other=None) -> Dict[str, List]: if _errors is not None: errors[variable].extend(_errors) + # The consistency schema rely on the row input being valid. + # Hence, they are tested second. if not errors: for schema in self.timml_consistency_schemata: _error = schema.validate(data, other) if _error: - errors["table"].append(_error) + errors["Table:"].append(_error) if errors: return errors else: return None + def validate_timml(self, data, other=None) -> Dict[str, Any]: + if isinstance(data, list): + return self._validate_timml_rows(data, other) + elif isinstance(data, dict): + return self._validate_timml_table(data, other) + else: + raise TypeError( + f"Data should be list or dict, received: {type(data).__name__}" + ) + def validate_ttim(self, data, other=None) -> Dict[str, List]: raise NotImplementedError @@ -274,8 +312,16 @@ def on_transient_changed(self, transient: bool): self.timml_layer.setAttributeTableConfig(config) return - @abc.abstractmethod def to_timml(self, other=None): + data = self.to_dict(self.timml_layer) + errors = self.schema.validate_timml(data, other) + if errors: + return errors, data + else: + elements = [self.process_timml_row(row) for row in data] + return None, elements + + def to_ttim(self, other=None): pass @staticmethod 
@@ -323,12 +369,11 @@ def interleave(a, b): porosity = interleave(data["aquitard_npor"], data["aquifer_npor"]) s_aquifer = data["aquifer_s"] s_aquitard = data["aquitard_s"] - topboundary = "semi" if semiconfined: - topboundary = "conf" - else: topboundary = "semi" + else: + topboundary = "conf" z.pop(0) c.pop(0) porosity.pop(0) diff --git a/plugin/qgistim/core/elements/head_line_sink.py b/plugin/qgistim/core/elements/head_line_sink.py index 059f4a7..cc6a00d 100644 --- a/plugin/qgistim/core/elements/head_line_sink.py +++ b/plugin/qgistim/core/elements/head_line_sink.py @@ -1,7 +1,7 @@ from PyQt5.QtCore import QVariant from qgis.core import QgsDefaultValue, QgsField, QgsSingleSymbolRenderer from qgistim.core.elements.colors import BLUE -from qgistim.core.elements.element import TransientElement +from qgistim.core.elements.element import ElementSchema, TransientElement from qgistim.core.schemata import ( AllOrNone, Membership, @@ -13,23 +13,20 @@ ) -class HeadLineSinkSchema: - schemata = { +class HeadLineSinkSchema(ElementSchema): + timml_schemata = { "head": Required(), "resistance": Required(Positive()), "width": Required(Positive()), "order": Required(Positive()), - "layer": Required(Membership("layers")), + "layer": Required(Membership("aquifer layers")), } - - -class TransientHeadLineSinkSchema: - consistency_schemata = ( + ttim_consistency_schemata = ( AllOrNone("time_start", "time_end", "head_transient"), NotBoth("time_start", "timeseries_id"), Time(), ) - schemata = { + ttim_schemata = { "time_start": Optional(Time()), "time_end": Optional(Time()), } @@ -64,27 +61,22 @@ class HeadLineSink(TransientElement): "head_transient", "timeseries_id", ) + schema = HeadLineSinkSchema() @property def renderer(self) -> QgsSingleSymbolRenderer: return self.line_renderer(color=BLUE, width="0.75") - def to_timml(self): - data = self.to_dict(self.timml_layer) - sinks = [] - for row in data: - sinks.append( - { - "xy": self.linestring_xy(row), - "hls": row["head"], - "res": 
row["resistance"], - "wh": row["width"], - "order": row["order"], - "layers": row["layer"], - "label": row["label"], - } - ) - return sinks + def process_timml_row(self, row): + return { + "xy": self.linestring_xy(row), + "hls": row["head"], + "res": row["resistance"], + "wh": row["width"], + "order": row["order"], + "layers": row["layer"], + "label": row["label"], + } def to_ttim(self, time_start): data = self.to_dict(self.timml_layer) diff --git a/plugin/qgistim/core/elements/headwell.py b/plugin/qgistim/core/elements/headwell.py index 72b2050..39edd28 100644 --- a/plugin/qgistim/core/elements/headwell.py +++ b/plugin/qgistim/core/elements/headwell.py @@ -1,17 +1,68 @@ -class HeadWellSchema: - schemata = { - "head": [AllValueSchema()], - "radius": [AllValueSchema(), PositiveSchema()], - "resistance": [AllValueSchema(), PositiveSchema()], - "layer": [AllValueSchema(), MemberschipSchema("layers")], +from typing import Any, Dict + +from PyQt5.QtCore import QVariant +from qgis.core import QgsDefaultValue, QgsField, QgsSingleSymbolRenderer +from qgistim.core.elements.colors import BLUE +from qgistim.core.elements.element import ElementSchema, TransientElement +from qgistim.core.schemata import AllOrNone, Membership, NotBoth, Positive, Required + + +class HeadWellSchema(ElementSchema): + timml_schemata = { + "head": Required(), + "radius": Required(Positive()), + "resistance": Required(Positive()), + "layer": Required(Membership("aquifer_layers")), } + ttim_consistency_schemata = ( + AllOrNone(("time_start", "time_end", "head_transient")), + NotBoth("time_start", "timeseries_id"), + ) -class TransientHeadWellSchema: - schemata = { - "timeseries_id": [MembershipSchema("timeseries_ids")], +class HeadWell(TransientElement): + element_type = "Head Well" + geometry_type = "Point" + timml_attributes = ( + QgsField("head", QVariant.Double), + QgsField("radius", QVariant.Double), + QgsField("resistance", QVariant.Double), + QgsField("layer", QVariant.Int), + QgsField("label", 
QVariant.String), + QgsField("time_start", QVariant.Double), + QgsField("time_end", QVariant.Double), + QgsField("head_transient", QVariant.Double), + QgsField("timeseries_id", QVariant.Int), + ) + ttim_attributes = ( + QgsField("timeseries_id", QVariant.Int), + QgsField("time_start", QVariant.Double), + QgsField("head", QVariant.Double), + ) + timml_defaults = { + "radius": QgsDefaultValue("0.1"), + "resistance": QgsDefaultValue("0.0"), } - consistency_schemata = ( - AllOrNoneSchema(("time_start", "time_end", "head_transient")), - XorSchema("time_start", "timeseries_id"), + transient_columns = ( + "time_start", + "time_end", + "head_transient", + "timeseries_id", ) + schema = HeadWellSchema() + + @property + def renderer(self) -> QgsSingleSymbolRenderer: + return self.marker_renderer(color=BLUE, size="3") + + def process_timml_row(self, row) -> Dict[str, Any]: + x, y = self.point_xy(row) + return { + "xw": x, + "yw": y, + "hw": row["discharge"], + "rw": row["radius"], + "res": row["head"], + "layers": row["layer"], + "label": row["label"], + } diff --git a/plugin/qgistim/core/elements/impermeable_line_doublet.py b/plugin/qgistim/core/elements/impermeable_line_doublet.py index e69de29..1f5c8be 100644 --- a/plugin/qgistim/core/elements/impermeable_line_doublet.py +++ b/plugin/qgistim/core/elements/impermeable_line_doublet.py @@ -0,0 +1,38 @@ +from PyQt5.QtCore import QVariant +from qgis.core import QgsDefaultValue, QgsField, QgsSingleSymbolRenderer +from qgistim.core.elements.colors import RED +from qgistim.core.elements.element import Element, ElementSchema +from qgistim.core.schemata import Membership, Positive, Required + + +class ImpermeableLineDoubletSchema(ElementSchema): + timml_schemata = { + "order": Required(Positive()), + "layer": Required(Membership("aquifer layers")), + } + + +class ImpermeableLineDoublet(Element): + element_type = "Impermeable Line Doublet" + geometry_type = "Linestring" + timml_attributes = ( + QgsField("order", QVariant.Int), + 
QgsField("layer", QVariant.Int), + QgsField("label", QVariant.String), + ) + timml_defaults = { + "order": QgsDefaultValue("4"), + } + schema = ImpermeableLineDoubletSchema() + + @property + def renderer(self) -> QgsSingleSymbolRenderer: + return self.line_renderer(color=RED, width="0.75") + + def process_timml_row(self, row): + return { + "xy": self.linestring_xy(row), + "layers": row["layer"], + "order": row["order"], + "label": row["label"], + } diff --git a/plugin/qgistim/core/elements/leaky_line_doublet.py b/plugin/qgistim/core/elements/leaky_line_doublet.py index e69de29..06adf06 100644 --- a/plugin/qgistim/core/elements/leaky_line_doublet.py +++ b/plugin/qgistim/core/elements/leaky_line_doublet.py @@ -0,0 +1,41 @@ +from PyQt5.QtCore import QVariant +from qgis.core import QgsDefaultValue, QgsField, QgsSingleSymbolRenderer +from qgistim.core.elements.colors import RED +from qgistim.core.elements.element import Element, ElementSchema +from qgistim.core.schemata import Membership, Positive, Required + + +class LeakyLineDoubletSchema(ElementSchema): + timml_schemata = { + "resistance": Required(Positive()), + "order": Required(Positive()), + "layer": Required(Membership("aquifer layers")), + } + + +class LeakyLineDoublet(Element): + element_type = "Leaky Line Doublet" + geometry_type = "Linestring" + timml_attributes = ( + QgsField("resistance", QVariant.Double), + QgsField("order", QVariant.Int), + QgsField("layer", QVariant.Int), + QgsField("label", QVariant.String), + ) + timml_defaults = { + "order": QgsDefaultValue("4"), + } + schema = LeakyLineDoubletSchema() + + @property + def renderer(self) -> QgsSingleSymbolRenderer: + return self.line_renderer(color=RED, width="0.75", outline_style="dash") + + def process_timml_row(self, row): + return { + "xy": self.linestring_xy(row), + "res": row["resistance"], + "layers": row["layer"], + "order": row["order"], + "label": row["label"], + } diff --git a/plugin/qgistim/core/elements/line_sink_ditch.py 
b/plugin/qgistim/core/elements/line_sink_ditch.py index e69de29..fb8667e 100644 --- a/plugin/qgistim/core/elements/line_sink_ditch.py +++ b/plugin/qgistim/core/elements/line_sink_ditch.py @@ -0,0 +1,79 @@ +from PyQt5.QtCore import QVariant +from qgis.core import QgsDefaultValue, QgsField, QgsSingleSymbolRenderer +from qgistim.core.elements.colors import GREEN +from qgistim.core.elements.element import ElementSchema, TransientElement +from qgistim.core.schemata import ( + AllOrNone, + Membership, + NotBoth, + Optional, + Positive, + Required, + Time, +) + + +class LineSinkDitchSchema(ElementSchema): + timml_schemata = { + "discharge": Required(), + "resistance": Required(Positive()), + "width": Required(Positive()), + "order": Required(Positive()), + "layer": Required(Membership("aquifer layers")), + } + ttim_consistency_schemata = ( + AllOrNone("time_start", "time_end", "discharge_transient"), + NotBoth("time_start", "timeseries_id"), + Time(), + ) + ttim_schemata = { + "time_start": Optional(Time()), + "time_end": Optional(Time()), + } + + +class LineSinkDitch(TransientElement): + element_type = "Line Sink Ditch" + geometry_type = "Linestring" + timml_attributes = ( + QgsField("discharge", QVariant.Double), + QgsField("resistance", QVariant.Double), + QgsField("width", QVariant.Double), + QgsField("order", QVariant.Int), + QgsField("layer", QVariant.Int), + QgsField("label", QVariant.String), + QgsField("time_start", QVariant.Double), + QgsField("time_end", QVariant.Double), + QgsField("discharge_transient", QVariant.Double), + QgsField("timeseries_id", QVariant.Int), + ) + ttim_attributes = ( + QgsField("timeseries_id", QVariant.Int), + QgsField("time_start", QVariant.Double), + QgsField("discharge", QVariant.Double), + ) + timml_defaults = { + "order": QgsDefaultValue("4"), + } + transient_columns = ( + "time_start", + "time_end", + "discharge_transient", + "timeseries_id", + ) + schema = LineSinkDitchSchema() + + @property + def renderer(self) -> 
QgsSingleSymbolRenderer: + return self.line_renderer(color=GREEN, width="0.75") + + def process_timml_row(self, row): + return { + "xy": self.linestring_xy(row), + "Qls": row["discharge"], + "res": row["resistance"], + "wh": row["width"], + "order": row["order"], + "layers": row["layer"], + "label": row["label"], + } diff --git a/plugin/qgistim/core/elements/observation.py b/plugin/qgistim/core/elements/observation.py index b3bbb95..0dbd97d 100644 --- a/plugin/qgistim/core/elements/observation.py +++ b/plugin/qgistim/core/elements/observation.py @@ -1,7 +1,14 @@ from PyQt5.QtCore import QVariant from qgis.core import QgsDefaultValue, QgsField, QgsSingleSymbolRenderer from qgistim.core.elements.colors import LIGHT_BLUE -from qgistim.core.elements.element import TransientElement +from qgistim.core.elements.element import ElementSchema, TransientElement +from qgistim.core.schemata import Required + + +class ObservationSchema(ElementSchema): + timml_schemata = { + "geometry": Required(), + } class Observation(TransientElement): @@ -22,21 +29,16 @@ class Observation(TransientElement): "timeseries_id": QgsDefaultValue("1"), } transient_columns = ("timeseries_id",) + schema = ObservationSchema() @property def renderer(self) -> QgsSingleSymbolRenderer: return self.marker_renderer(color=LIGHT_BLUE, name="triangle", size="3") - def to_timml(self): - data = self.to_dict(self.timml_layer) - - observations = [] - for row in data: - observations.append( - { - "x": row["geometry"][0][0], - "y": row["geometry"][0][1], - "label": row["label"], - } - ) - return observations + def process_timml_row(self, row): + x, y = self.point_xy(row) + return { + "x": x, + "y": y, + "label": row["label"], + } diff --git a/plugin/qgistim/core/elements/polygon_inhomogeneity.py b/plugin/qgistim/core/elements/polygon_inhomogeneity.py index 4c87a36..691c628 100644 --- a/plugin/qgistim/core/elements/polygon_inhomogeneity.py +++ b/plugin/qgistim/core/elements/polygon_inhomogeneity.py @@ -1,19 +1,50 @@ 
from PyQt5.QtCore import QVariant from qgis.core import QgsDefaultValue, QgsField, QgsSingleSymbolRenderer from qgistim.core.elements.colors import GREY, TRANSPARENT_GREY -from qgistim.core.elements.element import AssociatedElement +from qgistim.core.elements.element import AssociatedElement, ElementSchema from qgistim.core.schemata import ( - AllOrNone, + AllGreaterEqual, + AllOptional, + AllRequired, + FirstOnly, Membership, - NotBoth, - Optional, + OffsetAllRequired, Positive, + Range, Required, SemiConfined, - Time, + StrictlyDecreasing, ) +class PolygonInhomgeneitySchema(ElementSchema): + timml_schemata = { + "inhomogeneity_id": Required(Membership("inhomogeneity_id")), + "order": Required(Positive()), + "ndegrees": Required(Positive()), + } + + +class AssociatedPolygonInhomogeneitySchema(ElementSchema): + timml_schemata = { + "inhomogeneity_id": AllRequired(), + "layer": AllRequired(Range()), + "aquifer_top": AllRequired(StrictlyDecreasing()), + "aquifer_bottom": AllRequired(StrictlyDecreasing()), + "aquitard_c": OffsetAllRequired(Positive()), + "aquifer_k": AllRequired(Positive()), + "semiconf_top": AllOptional(FirstOnly()), + "semiconf_head": AllOptional(FirstOnly()), + } + timml_consistency_schemata = ( + SemiConfined(), + AllGreaterEqual("aquifer_top", "aquifer_bottom"), + ) + + +# TODO: each group should have equal length + + class PolygonInhomogeneity(AssociatedElement): element_type = "Polygon Inhomogeneity" geometry_type = "Polygon" @@ -51,6 +82,8 @@ class PolygonInhomogeneity(AssociatedElement): "aquitard_npor", "aquifer_npor", ) + schema = PolygonInhomgeneitySchema() + assoc_schema = AssociatedPolygonInhomogeneitySchema() @property def renderer(self) -> QgsSingleSymbolRenderer: @@ -72,14 +105,19 @@ def on_transient_changed(self, transient: bool): def to_timml(self): data = self.to_dict(self.timml_layer) assoc_data = self.table_to_dict(self.assoc_layer) - grouped = self.groupby(assoc_data, "inhomogeneity_id") - - inhoms = [] - for row in data: - 
inhom_id = row["inhomogeneity_id"] - kwargs = self._aquifer_data(grouped[inhom_id]) - kwargs["xy"] = self._polygon_xy(row) - kwargs["order"] = row["order"] - kwargs["ndeg"] = row["ndegrees"] - inhoms.append(kwargs) - return inhoms + timml_errors = self.schema.validate_timml(data) + assoc_errors = self.assoc_schema.validate_timml(assoc_data) + errors = {**timml_errors, **assoc_errors} + if errors: + return errors, None + else: + grouped = self.groupby(assoc_data, "inhomogeneity_id") + inhoms = [] + for row in data: + inhom_id = row["inhomogeneity_id"] + kwargs = self._aquifer_data(grouped[inhom_id]) + kwargs["xy"] = self._polygon_xy(row) + kwargs["order"] = row["order"] + kwargs["ndeg"] = row["ndegrees"] + inhoms.append(kwargs) + return inhoms diff --git a/plugin/qgistim/core/elements/uniform_flow.py b/plugin/qgistim/core/elements/uniform_flow.py index d93364f..c013783 100644 --- a/plugin/qgistim/core/elements/uniform_flow.py +++ b/plugin/qgistim/core/elements/uniform_flow.py @@ -1,6 +1,14 @@ from PyQt5.QtCore import QVariant from qgis.core import QgsField -from qgistim.core.elements.element import Element +from qgistim.core.elements.element import Element, ElementSchema +from qgistim.core.schemata import Required + + +class UniformFlowSchema(ElementSchema): + timml_schemata = { + "slope": Required(), + "angle": Required(), + } class UniformFlow(Element): @@ -11,11 +19,8 @@ class UniformFlow(Element): QgsField("angle", QVariant.Double), QgsField("label", QVariant.String), ) + schema = UniformFlowSchema() - def to_timml(self): - data = self.to_dict(self.timml_layer) - return { - "slope": data["slope"], - "angle": data["angle"], - "label": data["label"], - } + def process_timml_row(self, row): + # No renaming or changes required + return row diff --git a/plugin/qgistim/core/elements/well.py b/plugin/qgistim/core/elements/well.py index 5fe8d05..b59c2c8 100644 --- a/plugin/qgistim/core/elements/well.py +++ b/plugin/qgistim/core/elements/well.py @@ -1,7 +1,9 @@ +from 
typing import Any, Dict + from PyQt5.QtCore import QVariant from qgis.core import QgsDefaultValue, QgsField, QgsSingleSymbolRenderer from qgistim.core.elements.colors import GREEN -from qgistim.core.elements.element import TransientElement +from qgistim.core.elements.element import ElementSchema, TransientElement from qgistim.core.schemata import ( AllOrNone, Membership, @@ -13,17 +15,15 @@ ) -class WellSchema: - schemata = { +class WellSchema(ElementSchema): + timml_schemata = { + "geometry": Required(), "discharge": Required(), - "radius": Required(), - "resistance": Required(), - "layer": Required(Membership("layers")), + "radius": Required(Positive()), + "resistance": Required(Positive()), + "layer": Required(Membership("aquifer layers")), } - - -class TransientWellSchema: - schemata = { + ttim_schemata = { "caisson_radius": Required(Positive), "slug": Required(), "time_start": Optional(Time()), @@ -71,28 +71,23 @@ class Well(TransientElement): "slug", "timeseries_id", ) + schema = WellSchema() @property def renderer(self) -> QgsSingleSymbolRenderer: return self.marker_renderer(color=GREEN, size="3") - def to_timml(self): - data = self.to_dict(self.timml_layer) - wells = [] - for row in data: - x, y = self.point_xy(row) - wells.append( - { - "xw": x, - "yw": y, - "Q": row["discharge"], - "rw": row["radius"], - "res": row["resistance"], - "layers": row["layer"], - "label": row["label"], - } - ) - return wells + def process_timml_row(self, row) -> Dict[str, Any]: + x, y = self.point_xy(row) + return { + "xw": x, + "yw": y, + "Qw": row["discharge"], + "rw": row["radius"], + "res": row["resistance"], + "layers": row["layer"], + "label": row["label"], + } def to_ttim(self, time_start): data = self.to_dict(self.timml_layer) diff --git a/plugin/qgistim/core/extractor.py b/plugin/qgistim/core/extractor.py index 8bb1779..65909ba 100644 --- a/plugin/qgistim/core/extractor.py +++ b/plugin/qgistim/core/extractor.py @@ -1,6 +1,6 @@ import abc from collections import 
defaultdict -from typing import Any, Dict +from typing import Any, Dict, List from qgis.core import NULL @@ -24,7 +24,7 @@ def extract_coordinates(feature): return coordinates @classmethod - def to_dict(cls, layer) -> Dict[str, Any]: + def to_dict(cls, layer) -> List[Dict[str, Any]]: features = [] for feature in layer.getFeatures(): data = feature.attributeMap() @@ -46,11 +46,11 @@ def table_to_dict(cls, layer) -> Dict[str, Any]: features[key].append(value) # Sort by layer if present. - if "layer" in features: - order = cls.argsort(features["layer"]) - return {k: [v[i] for i in order] for k, v in features.items()} - else: - return features + # TODO: is this smart? A user may easily miss the layer column. + # if "layer" in features: + # order = cls.argsort(features["layer"]) + # return {k: [v[i] for i in order] for k, v in features.items()} + return features @staticmethod def point_xy(row): diff --git a/plugin/qgistim/core/formatting.py b/plugin/qgistim/core/formatting.py new file mode 100644 index 0000000..b98fb7b --- /dev/null +++ b/plugin/qgistim/core/formatting.py @@ -0,0 +1,173 @@ +""" +Format the content of a collection of dictionaries into a Python script. 
+""" + +import pprint +import re +import textwrap +from typing import Any, Dict, Tuple + +import numpy as np + +MAPPING = { + "Constant": "Constant", + "Uniform Flow": "Uflow", + "Circular Area Sink": "CircAreaSink", + "Well": "Well", + "Head Well": "HeadWell", + "Polygon Inhomogeneity": "PolygonInhomMaq", + "Polygon Area Sink": "PolygonInhomMaq", + "Polygon Semi-Confined Top": "PolygonInhomMaq", + "Head Line Sink": "HeadLineSinkString", + "Line Sink Ditch": "LineSinkDitchString", + "Leaky Line Doublet": "LeakyLineDoubletString", + "Impermeable Line Doublet": "ImpLineDoubletString", + "Building Pit": "BuildingPit", + "Leaky Building Pit": "LeakyBuildingPit", + "Observation": "Observation", +} +PREFIX = " " + + +def sanitized(name: str) -> str: + return name.split(":")[-1].replace(" ", "_") + + +def format_kwargs(data: Dict[str, Any]) -> str: + return textwrap.indent( + "\n".join(f"{k}={pprint.pformat(v)}," for k, v in data.items()), prefix=PREFIX + ) + + +def round_extent(domain: Dict[str, float], cellsize: float) -> Tuple[float]: + """ + Increases the extent until all sides lie on a coordinate + divisible by cellsize. 
+ + Parameters + ---------- + extent: Tuple[float] + xmin, xmax, ymin, ymax + cellsize: float + Desired cell size of the output head grids + + Returns + ------- + extent: Tuple[float] + xmin, xmax, ymin, ymax + """ + xmin = domain["xmin"] + ymin = domain["ymin"] + xmax = domain["xmax"] + ymax = domain["ymax"] + xmin = np.floor(xmin / cellsize) * cellsize + ymin = np.floor(ymin / cellsize) * cellsize + xmax = np.ceil(xmax / cellsize) * cellsize + ymax = np.ceil(ymax / cellsize) * cellsize + xmin += 0.5 * cellsize + xmax += 0.5 * cellsize + ymax -= 0.5 * cellsize + xmin -= 0.5 * cellsize + return xmin, xmax, ymin, ymax + + +def headgrid_entry(domain: Dict[str, float], cellsize: float) -> Dict[str, float]: + (xmin, xmax, ymin, ymax) = round_extent(domain, cellsize) + return { + "xmin": xmin, + "xmax": xmax, + "ymin": ymin, + "ymax": ymax, + "spacing": cellsize, + } + + +def headgrid_code(domain) -> Tuple[str, str]: + ymin = domain["ymin"] + ymax = domain["ymax"] + dy = (ymax - ymin) / 50.0 + # Some reasonable defaults for grid spacing: + if dy > 500.0: + dy = round(dy / 500.0) * 500.0 + elif dy > 50.0: + dy = round(dy / 50.0) * 50.0 + elif dy > 5.0: # round to five + dy = round(dy / 5.0) * 5.0 + elif dy > 1.0: + dy = round(dy) + (xmin, xmax, ymin, ymax) = round_extent(domain, dy) + xg = textwrap.indent(f"xg=np.arange({xmin}, {xmax}, {dy})", prefix=PREFIX) + yg = textwrap.indent(f"yg=np.arange({ymax}, {ymin}, -{dy})", prefix=PREFIX) + return xg, yg + + +def to_script_string(data: Dict[str, Any]) -> str: + """ + Convert the data into a runnable Python script. + + Returns + ------- + data: Dict[str, Any] + All the data for the TimML model. + script: str + A runnable Python script. 
+ """ + data = data.copy() # avoid side-effects + aquifer_data = data.pop("timml Aquifer:Aquifer") + domain_data = data.pop("timml Domain:Domain") + + strings = [ + "import numpy as np", + "import timml", + "", + f"model = timml.ModelMaq(\n{format_kwargs(aquifer_data)}\n)", + ] + + model_string = textwrap.indent("model=model,", prefix=PREFIX) + observations = [] + for layername, element_data in data.items(): + prefix, name = layername.split(":") + plugin_name = re.split("timml |ttim ", prefix)[1] + timml_name = MAPPING[plugin_name] + + for i, kwargs in enumerate(element_data): + if plugin_name == "Observation": + # Should not be added to the model. + kwargs.pop("label") + observations.append( + f"observation_{sanitized(name)}_{i}=model.head(\n{format_kwargs(kwargs)}\n)" + ) + else: + # Has to be added to the model. + kwargs = format_kwargs(kwargs) + strings.append( + f"{sanitized(name)}_{i} = timml.{timml_name}(\n{model_string}\n{kwargs}\n)" + ) + + strings.append("\nmodel.solve()\n") + + xg, yg = headgrid_code(domain_data) + strings.append(f"head = model.headgrid(\n{xg},\n{yg}\n)") + strings.append("\n") + strings.extend(observations) + + return "\n".join(strings) + + +def to_json(data: Dict[str, Any]) -> Dict[str, Any]: + data = data.copy() # avoid side-effects + aquifer_data = data.pop("timml Aquifer:Aquifer") + domain_data = data.pop("timml Domain:Domain") + + json_data = { + "ModelMaq": aquifer_data, + "headgrid": headgrid_entry(domain_data, domain_data["cellsize"]), + } + + for layername, element_data in data.items(): + prefix, name = layername.split(":") + plugin_name = re.split("timml |ttim ", prefix)[1] + timml_name = MAPPING[plugin_name] + json_data[layername] = {"type": timml_name, "name": name, "data": element_data} + + return json_data diff --git a/plugin/qgistim/core/schemata.py b/plugin/qgistim/core/schemata.py index 739da68..ad9ddf6 100644 --- a/plugin/qgistim/core/schemata.py +++ b/plugin/qgistim/core/schemata.py @@ -295,7 +295,7 @@ def 
validate(self, data, _=None) -> MaybeError: class SingleRow(IterableSchema): - def validate(self, data, other=None) -> MaybeError: + def validate(self, data, _=None) -> MaybeError: if len(data) != 1: return "Table may contain only one row." return None diff --git a/plugin/qgistim/widgets/dataset_widget.py b/plugin/qgistim/widgets/dataset_widget.py index b4e9851..5127547 100644 --- a/plugin/qgistim/widgets/dataset_widget.py +++ b/plugin/qgistim/widgets/dataset_widget.py @@ -206,8 +206,20 @@ def convert_to_timml(self): } data = {} errors = {} + + # First convert the aquifer, since we need its data to validate + # other elements. + name = "timml Aquifer:Aquifer" + aquifer = elements.pop(name) + _errors, raw_data = aquifer.to_timml() + other = {"aquifer layers": raw_data["layer"]} + if _errors: + errors[name] = _errors + else: + data[name] = aquifer.aquifer_data(raw_data, transient=False) + for name, element in elements.items(): - _errors, _data = element.to_timml() + _errors, _data = element.to_timml(other) if _errors: errors[name] = _errors else: @@ -467,6 +479,17 @@ def convert_to_json(self) -> None: self.validation_dialog = ValidationDialog(errors) return - json_string = json.dumps(data, indent=4) + # TODO + data["timml Domain:Domain"][ + "cellsize" + ] = self.parent.compute_widget.cellsize_spin_box.value() + + from qgistim.core.formatting import to_json, to_script_string + + to_python = to_script_string(data) + print(to_python) + + json_data = to_json(data) + json_string = json.dumps(json_data, indent=4) print(json_string) return diff --git a/plugin/qgistim/widgets/error_window.py b/plugin/qgistim/widgets/error_window.py index 6cf94e8..2f922ca 100644 --- a/plugin/qgistim/widgets/error_window.py +++ b/plugin/qgistim/widgets/error_window.py @@ -3,14 +3,26 @@ from PyQt5.QtWidgets import QDialog, QHBoxLayout, QPushButton, QTextBrowser, QVBoxLayout +def format_list(errors: Dict[str, Any]): + """Recursive formatting""" + messages = [] + for variable, var_errors in 
errors.items(): + if isinstance(var_errors, list): + messages.append(f"

{variable}

    ") + messages.extend(f"
  • {error}
  • " for error in var_errors) + messages.append("
") + else: + messages.append(f"

{variable}

  • ") + messages.extend(format_list(var_errors)) + messages.append("
") + return messages + + def format_errors(errors: Dict[str, Dict[str, Any]]): messages = [] for element, element_errors in errors.items(): messages.append(f"{element}") - for variable, var_errors in element_errors.items(): - messages.append("
    ") - messages.extend(f"
  • {variable}: {error}
  • " for error in var_errors) - messages.append("
") + messages.extend(format_list(element_errors)) return "".join(messages) diff --git a/test_plugin/test_schemata.py b/test_plugin/test_schemata.py index 9b0eb06..969a7fa 100644 --- a/test_plugin/test_schemata.py +++ b/test_plugin/test_schemata.py @@ -17,6 +17,7 @@ Range, Required, SemiConfined, + SingleRow, StrictlyDecreasing, StrictlyIncreasing, ) @@ -251,3 +252,11 @@ def test_semi_confined(self): schema.validate(d), "semiconf_top must be greater than first aquifer_top.", ) + + +class TestSingleRow(TestCase): + def test_single_row(self): + schema = SingleRow() + self.assertIsNone(schema.validate([{"a": 1}])) + data = [{"a": 1}, {"a": 2}] + self.assertEqual(schema.validate(data), "Table may contain only one row.") From cb5146c6a2a85e48bc2ced0aba5d13dbaee021ef Mon Sep 17 00:00:00 2001 From: Huite Bootsma Date: Sun, 17 Sep 2023 17:40:02 +0200 Subject: [PATCH 08/41] More consistent typing on Errors --- plugin/qgistim/core/elements/__init__.py | 9 +- plugin/qgistim/core/elements/aquifer.py | 17 +- .../core/elements/circular_area_sink.py | 86 +++++++-- plugin/qgistim/core/elements/domain.py | 5 +- plugin/qgistim/core/elements/element.py | 168 +++++++++++------- .../qgistim/core/elements/head_line_sink.py | 12 +- plugin/qgistim/core/elements/headwell.py | 14 +- .../core/elements/impermeable_line_doublet.py | 16 ++ .../qgistim/core/elements/line_sink_ditch.py | 12 +- plugin/qgistim/core/elements/observation.py | 6 +- .../core/elements/polygon_inhomogeneity.py | 7 +- plugin/qgistim/core/elements/well.py | 11 +- plugin/qgistim/core/extractor.py | 6 +- plugin/qgistim/core/formatting.py | 15 ++ plugin/qgistim/core/schemata.py | 157 ++++++++-------- test_plugin/test_aquifer_schemata.py | 8 +- test_plugin/test_schemata.py | 54 +++--- 17 files changed, 383 insertions(+), 220 deletions(-) diff --git a/plugin/qgistim/core/elements/__init__.py b/plugin/qgistim/core/elements/__init__.py index 7ec268f..4ca1520 100644 --- a/plugin/qgistim/core/elements/__init__.py +++ 
b/plugin/qgistim/core/elements/__init__.py @@ -5,6 +5,7 @@ from qgistim.core import geopackage from qgistim.core.elements.aquifer import Aquifer +from qgistim.core.elements.circular_area_sink import CircularAreaSink from qgistim.core.elements.constant import Constant from qgistim.core.elements.domain import Domain from qgistim.core.elements.element import Element @@ -28,8 +29,14 @@ HeadWell, HeadLineSink, LineSinkDitch, - LeakyLineDoublet, + CircularAreaSink, ImpermeableLineDoublet, + LeakyLineDoublet, + # PolygonAreaSink, + # PolygonSemiConfinedTop, + # PolygonInhomogeneity, + # BuildingPit, + # LeakyBuildingPit, Observation, ) } diff --git a/plugin/qgistim/core/elements/aquifer.py b/plugin/qgistim/core/elements/aquifer.py index b28c51e..e8ba1cc 100644 --- a/plugin/qgistim/core/elements/aquifer.py +++ b/plugin/qgistim/core/elements/aquifer.py @@ -4,13 +4,13 @@ from qgistim.core.elements.element import ElementSchema, TransientElement from qgistim.core.schemata import ( AllGreaterEqual, - AllOptional, AllRequired, - FirstOnly, OffsetAllRequired, + OptionalFirstOnly, Positive, Range, SemiConfined, + SingleRow, StrictlyDecreasing, ) @@ -22,8 +22,8 @@ class AquiferSchema(ElementSchema): "aquifer_bottom": AllRequired(StrictlyDecreasing()), "aquitard_c": OffsetAllRequired(Positive()), "aquifer_k": AllRequired(Positive()), - "semiconf_top": AllOptional(FirstOnly()), - "semiconf_head": AllOptional(FirstOnly()), + "semiconf_top": OptionalFirstOnly(), + "semiconf_head": OptionalFirstOnly(), } timml_consistency_schemata = ( SemiConfined(), @@ -33,6 +33,7 @@ class AquiferSchema(ElementSchema): "aquitard_s": OffsetAllRequired(Positive()), "aquifer_s": AllRequired(Positive()), } + ttim_consistency_schemata = (SingleRow(),) class Aquifer(TransientElement): @@ -53,16 +54,12 @@ class Aquifer(TransientElement): ] ttim_attributes = ( QgsField("time_min", QVariant.Double), - QgsField("time_max", QVariant.Double), - QgsField("time_start", QVariant.Double), - QgsField("stehfest_M", 
QVariant.Int), + QgsField("laplace_inversion_M", QVariant.Int), QgsField("reference_date", QVariant.DateTime), ) ttim_defaults = { "time_min": QgsDefaultValue("0.01"), - "time_max": QgsDefaultValue("10.0"), - "time_start": QgsDefaultValue("0.0"), - "stehfest_M": QgsDefaultValue("10"), + "laplace_inversion_M": QgsDefaultValue("10"), } transient_columns = ( "aquitard_s", diff --git a/plugin/qgistim/core/elements/circular_area_sink.py b/plugin/qgistim/core/elements/circular_area_sink.py index bb5d1b8..9eb1306 100644 --- a/plugin/qgistim/core/elements/circular_area_sink.py +++ b/plugin/qgistim/core/elements/circular_area_sink.py @@ -1,17 +1,83 @@ -class CircularAreaSinkSchema: - schemata = { +from typing import Any, Dict + +from PyQt5.QtCore import QVariant +from qgis.core import QgsField +from qgistim.core.elements.colors import GREEN, TRANSPARENT_GREEN +from qgistim.core.elements.element import ElementSchema, TransientElement +from qgistim.core.schemata import ( + AllOrNone, + AllRequired, + CircularGeometry, + Membership, + NotBoth, + Optional, + Positive, + Required, +) + + +class CircularAreaSinkSchema(ElementSchema): + timml_schemata = { + "geometry": Required(CircularGeometry()), "rate": Required(), "layer": Required(Membership("aquifer layers")), } - - -class TransientCircularAreaSinkSchema: - consistency_schemata = ( + ttim_schemata = { + "time_start": Optional(Positive()), + "time_end": Optional(Positive()), + "timeseries_id": Optional(Membership("timeseries_ids")), + } + ttim_consistency_schemata = ( AllOrNone("time_start", "time_end", "rate_transient"), NotBoth("time_start", "timeseries_id"), ) - schemata = { - "time_start": Optional(Time()), - "time_end": Optional(Time()), - "timeseries_id": Optional(Membership("timeseries_ids")), + timeseries_schemata = { + "timeseries_id": AllRequired(), + "time_start": AllRequired(Positive()), + "rate": AllRequired(), } + + +class CircularAreaSink(TransientElement): + element_type = "Circular Area Sink" + geometry_type 
= "Polygon" + timml_attributes = ( + QgsField("rate", QVariant.Double), + QgsField("layer", QVariant.Int), + QgsField("label", QVariant.String), + QgsField("time_start", QVariant.Double), + QgsField("time_end", QVariant.Double), + QgsField("rate_transient", QVariant.Double), + QgsField("timeseries_id", QVariant.Int), + ) + ttim_attributes = ( + QgsField("timeseries_id", QVariant.Int), + QgsField("time_start", QVariant.Double), + QgsField("rate", QVariant.Double), + ) + transient_columns = ( + "time_start", + "time_end", + "rate_transient", + "timeseries_id", + ) + schema = CircularAreaSinkSchema() + + @property + def renderer(self): + return self.polygon_renderer( + color=TRANSPARENT_GREEN, color_border=GREEN, width_border="0.75" + ) + + def process_timml_row(self, row) -> Dict[str, Any]: + # Take the first vertex. + x, y = self.point_xy(row) + # Compare with the centroid to derive radius. + xc, yc = row["centroid"] + radius = ((x - xc) ** 2 + (y - yc) ** 2) ** 0.5 + return { + "xc": xc, + "yc": yc, + "R": radius, + "N": row["rate"], + } diff --git a/plugin/qgistim/core/elements/domain.py b/plugin/qgistim/core/elements/domain.py index f362b9f..c222b22 100644 --- a/plugin/qgistim/core/elements/domain.py +++ b/plugin/qgistim/core/elements/domain.py @@ -10,12 +10,15 @@ ) from qgistim.core.elements.colors import BLACK from qgistim.core.elements.element import ElementSchema, TransientElement -from qgistim.core.schemata import Required, SingleRow +from qgistim.core.schemata import AllRequired, Positive, Required, SingleRow class DomainSchema(ElementSchema): timml_schemata = {"geometry": Required()} timml_consistency_schemata = (SingleRow(),) + timeseries_schemata = { + "time": AllRequired(Positive()), + } class Domain(TransientElement): diff --git a/plugin/qgistim/core/elements/element.py b/plugin/qgistim/core/elements/element.py index 1e95149..84a2008 100644 --- a/plugin/qgistim/core/elements/element.py +++ b/plugin/qgistim/core/elements/element.py @@ -79,68 +79,78 @@ 
class ElementSchema(abc.ABC): timml_consistency_schemata: Tuple[Any] = () ttim_schemata: Dict[str, Any] = {} ttim_consistency_schemata: Tuple[Any] = () + timeseries_schemata: Dict[str, Any] = {} - def _validate_timml_rows(self, data, other=None): - errors = {} + @staticmethod + def _validate_rows(schemata, consistency_schemata, data, other=None): + errors = defaultdict(list) - _errors = [] - for schema in self.timml_consistency_schemata: + for schema in consistency_schemata: _error = schema.validate(data, other) - if _error is not None: - _errors.append(_error) - if _errors: - errors["Table:"] = _errors + if _error: + errors["Table:"].append(_error) for i, row in enumerate(data): row_errors = defaultdict(list) - for variable, schema in self.timml_schemata.items(): - _error = schema.validate(row[variable], other) - if _error is not None: - row_errors[variable].append(_error) + for variable, schema in schemata.items(): + _errors = schema.validate(row[variable], other) + if _errors: + row_errors[variable].extend(_errors) if row_errors: errors[f"Row {i + 1}:"] = row_errors - if errors: - return errors - else: - return None + return errors - def _validate_timml_table(self, data, other=None): + @staticmethod + def _validate_table(schemata, consistency_schemata, data, other=None): if len(data) == 0: return {"Table:": ["Table is empty."]} errors = defaultdict(list) - for variable, schema in self.timml_schemata.items(): + for variable, schema in schemata.items(): _errors = schema.validate(data[variable], other) - if _errors is not None: + if _errors: errors[variable].extend(_errors) # The consistency schema rely on the row input being valid. # Hence, they are tested second. 
if not errors: - for schema in self.timml_consistency_schemata: + for schema in consistency_schemata: _error = schema.validate(data, other) if _error: errors["Table:"].append(_error) - if errors: - return errors - else: - return None + return errors - def validate_timml(self, data, other=None) -> Dict[str, Any]: + @classmethod + def _validate( + cls, schemata, consistency_schemata, data, other=None + ) -> Dict[str, Any]: if isinstance(data, list): - return self._validate_timml_rows(data, other) + return cls._validate_rows(schemata, consistency_schemata, data, other) elif isinstance(data, dict): - return self._validate_timml_table(data, other) + return cls._validate_table(schemata, consistency_schemata, data, other) else: raise TypeError( f"Data should be list or dict, received: {type(data).__name__}" ) - def validate_ttim(self, data, other=None) -> Dict[str, List]: - raise NotImplementedError + @classmethod + def validate_timml(cls, data, other=None): + return cls._validate( + cls.timml_schemata, cls.timml_consistency_schemata, data, other + ) + + @classmethod + def validate_ttim(cls, data, other=None): + return cls._validate( + cls.ttim_schemata, cls.ttim_consistency_schemata, data, other + ) + + @classmethod + def validate_timeseries(cls, data, other=None): + return cls._validate_table(cls.timeseries_schemata, (), data, other) class NameDialog(QDialog): @@ -298,18 +308,7 @@ def write(self): def remove_from_geopackage(self): geopackage.remove_layer(self.path, self.timml_name) - def on_transient_changed(self, transient: bool): - if len(self.transient_columns) == 0: - return - - config = self.timml_layer.attributeTableConfig() - columns = config.columns() - - for i, column in enumerate(columns): - if column.name in self.transient_columns: - config.setColumnHidden(i, not transient) - - self.timml_layer.setAttributeTableConfig(config) + def on_transient_changed(self, _): return def to_timml(self, other=None): @@ -319,35 +318,10 @@ def to_timml(self, other=None): 
return errors, data else: elements = [self.process_timml_row(row) for row in data] - return None, elements + return [], elements def to_ttim(self, other=None): - pass - - @staticmethod - def transient_input(row, all_timeseries, variable: str, time_start): - timeseries_id = row["timeseries_id"] - row_start = row["time_start"] - row_end = row["time_end"] - steady_value = row[variable] - transient_value = row[f"{variable}_transient"] - - start_and_stop = ( - row_start is not None - and row_end is not None - and transient_value is not None - ) - - if start_and_stop: - return [(row_start, transient_value - steady_value), (row_end, 0.0)] - elif timeseries_id is not None: - timeseries = all_timeseries[timeseries_id] - return [ - (time, value - steady_value) - for time, value in zip(timeseries["time_start"], timeseries[variable]) - ] - else: - return [(time_start), 0.0] + return self.to_timml(other) @staticmethod def aquifer_data(data, transient: bool): @@ -439,6 +413,64 @@ def remove_from_geopackage(self): geopackage.remove_layer(self.path, self.timml_name) geopackage.remove_layer(self.path, self.ttim_name) + @staticmethod + def transient_input(row, all_timeseries, variable: str, time_start): + timeseries_id = row["timeseries_id"] + row_start = row["time_start"] + row_end = row["time_end"] + steady_value = row[variable] + transient_value = row[f"{variable}_transient"] + + start_and_stop = ( + row_start is not None + and row_end is not None + and transient_value is not None + ) + + if timeseries_id is not None: + timeseries = all_timeseries[timeseries_id] + return [ + (time, value - steady_value) + for time, value in zip(timeseries["time_start"], timeseries[variable]) + ] + elif start_and_stop: + return [(row_start, transient_value - steady_value), (row_end, 0.0)] + else: + return [(time_start), 0.0] + + def on_transient_changed(self, transient: bool): + if len(self.transient_columns) == 0: + return + + config = self.timml_layer.attributeTableConfig() + columns = 
config.columns() + + for i, column in enumerate(columns): + if column.name in self.transient_columns: + config.setColumnHidden(i, not transient) + + self.timml_layer.setAttributeTableConfig(config) + return + + def to_ttim(self, other=None): + # Check timml input and timeseries input first. + timml_errors, data = self.to_timml(other) + timeseries = self.table_to_dict(self.ttim_layer) + timeseries_errors = self.schema.validate_timeseries(timeseries) + errors = timml_errors + timeseries_errors + if errors: + return errors, None + + other = other.copy() + other["timeseries_ids"] = [row["timeseries_id"] for row in timeseries] + errors = self.schema.validate_ttim(data, other) + if errors: + return errors, None + + grouped = self.groupby(timeseries, "timeseries_id") + elements = [self.process_ttim_row(row, grouped) for row in data] + return None, elements + class AssociatedElement(Element): """ diff --git a/plugin/qgistim/core/elements/head_line_sink.py b/plugin/qgistim/core/elements/head_line_sink.py index cc6a00d..fd220df 100644 --- a/plugin/qgistim/core/elements/head_line_sink.py +++ b/plugin/qgistim/core/elements/head_line_sink.py @@ -4,12 +4,12 @@ from qgistim.core.elements.element import ElementSchema, TransientElement from qgistim.core.schemata import ( AllOrNone, + AllRequired, Membership, NotBoth, Optional, Positive, Required, - Time, ) @@ -24,11 +24,15 @@ class HeadLineSinkSchema(ElementSchema): ttim_consistency_schemata = ( AllOrNone("time_start", "time_end", "head_transient"), NotBoth("time_start", "timeseries_id"), - Time(), ) ttim_schemata = { - "time_start": Optional(Time()), - "time_end": Optional(Time()), + "time_start": Optional(Positive()), + "time_end": Optional(Positive()), + } + timeseries_schemata = { + "timeseries_id": AllRequired(), + "time_start": AllRequired(Positive()), + "head": AllRequired(), } diff --git a/plugin/qgistim/core/elements/headwell.py b/plugin/qgistim/core/elements/headwell.py index 39edd28..24d86c7 100644 --- 
a/plugin/qgistim/core/elements/headwell.py +++ b/plugin/qgistim/core/elements/headwell.py @@ -4,7 +4,14 @@ from qgis.core import QgsDefaultValue, QgsField, QgsSingleSymbolRenderer from qgistim.core.elements.colors import BLUE from qgistim.core.elements.element import ElementSchema, TransientElement -from qgistim.core.schemata import AllOrNone, Membership, NotBoth, Positive, Required +from qgistim.core.schemata import ( + AllOrNone, + AllRequired, + Membership, + NotBoth, + Positive, + Required, +) class HeadWellSchema(ElementSchema): @@ -18,6 +25,11 @@ class HeadWellSchema(ElementSchema): AllOrNone(("time_start", "time_end", "head_transient")), NotBoth("time_start", "timeseries_id"), ) + timeseries_schemata = { + "timeseries_id": AllRequired(), + "time_start": AllRequired(Positive()), + "head": AllRequired(), + } class HeadWell(TransientElement): diff --git a/plugin/qgistim/core/elements/impermeable_line_doublet.py b/plugin/qgistim/core/elements/impermeable_line_doublet.py index 1f5c8be..a4e9ddf 100644 --- a/plugin/qgistim/core/elements/impermeable_line_doublet.py +++ b/plugin/qgistim/core/elements/impermeable_line_doublet.py @@ -36,3 +36,19 @@ def process_timml_row(self, row): "order": row["order"], "label": row["label"], } + + def to_ttim(self, other): + # TTim doesn't have an ImpermeableLineDoublet, we need to add "imp" as + # the resistance entry. 
+ data = self.to_timml(other) + out = [] + for row in data: + out.append( + { + "xy": row["xy"], + "res": "imp", + "order": row["order"], + "label": row["label"], + } + ) + return out diff --git a/plugin/qgistim/core/elements/line_sink_ditch.py b/plugin/qgistim/core/elements/line_sink_ditch.py index fb8667e..d3a709b 100644 --- a/plugin/qgistim/core/elements/line_sink_ditch.py +++ b/plugin/qgistim/core/elements/line_sink_ditch.py @@ -4,12 +4,12 @@ from qgistim.core.elements.element import ElementSchema, TransientElement from qgistim.core.schemata import ( AllOrNone, + AllRequired, Membership, NotBoth, Optional, Positive, Required, - Time, ) @@ -24,11 +24,15 @@ class LineSinkDitchSchema(ElementSchema): ttim_consistency_schemata = ( AllOrNone("time_start", "time_end", "discharge_transient"), NotBoth("time_start", "timeseries_id"), - Time(), ) ttim_schemata = { - "time_start": Optional(Time()), - "time_end": Optional(Time()), + "time_start": Optional(Positive()), + "time_end": Optional(Positive()), + } + timeseries_schemata = { + "timeseries_id": AllRequired(), + "time_start": AllRequired(Positive()), + "discharge": AllRequired(), } diff --git a/plugin/qgistim/core/elements/observation.py b/plugin/qgistim/core/elements/observation.py index 0dbd97d..a04c29c 100644 --- a/plugin/qgistim/core/elements/observation.py +++ b/plugin/qgistim/core/elements/observation.py @@ -2,13 +2,17 @@ from qgis.core import QgsDefaultValue, QgsField, QgsSingleSymbolRenderer from qgistim.core.elements.colors import LIGHT_BLUE from qgistim.core.elements.element import ElementSchema, TransientElement -from qgistim.core.schemata import Required +from qgistim.core.schemata import AllRequired, Positive, Required class ObservationSchema(ElementSchema): timml_schemata = { "geometry": Required(), } + timeseries_schemata = { + "timeseries_id": AllRequired(), + "time": AllRequired(Positive()), + } class Observation(TransientElement): diff --git a/plugin/qgistim/core/elements/polygon_inhomogeneity.py 
b/plugin/qgistim/core/elements/polygon_inhomogeneity.py index 691c628..b2facdd 100644 --- a/plugin/qgistim/core/elements/polygon_inhomogeneity.py +++ b/plugin/qgistim/core/elements/polygon_inhomogeneity.py @@ -4,11 +4,10 @@ from qgistim.core.elements.element import AssociatedElement, ElementSchema from qgistim.core.schemata import ( AllGreaterEqual, - AllOptional, AllRequired, - FirstOnly, Membership, OffsetAllRequired, + OptionalFirstOnly, Positive, Range, Required, @@ -33,8 +32,8 @@ class AssociatedPolygonInhomogeneitySchema(ElementSchema): "aquifer_bottom": AllRequired(StrictlyDecreasing()), "aquitard_c": OffsetAllRequired(Positive()), "aquifer_k": AllRequired(Positive()), - "semiconf_top": AllOptional(FirstOnly()), - "semiconf_head": AllOptional(FirstOnly()), + "semiconf_top": OptionalFirstOnly(), + "semiconf_head": OptionalFirstOnly(), } timml_consistency_schemata = ( SemiConfined(), diff --git a/plugin/qgistim/core/elements/well.py b/plugin/qgistim/core/elements/well.py index b59c2c8..06b13a6 100644 --- a/plugin/qgistim/core/elements/well.py +++ b/plugin/qgistim/core/elements/well.py @@ -6,12 +6,12 @@ from qgistim.core.elements.element import ElementSchema, TransientElement from qgistim.core.schemata import ( AllOrNone, + AllRequired, Membership, NotBoth, Optional, Positive, Required, - Time, ) @@ -26,14 +26,19 @@ class WellSchema(ElementSchema): ttim_schemata = { "caisson_radius": Required(Positive), "slug": Required(), - "time_start": Optional(Time()), - "time_end": Optional(Time()), + "time_start": Optional(Positive()), + "time_end": Optional(Positive()), "timeseries_id": Optional(Membership("timeseries_ids")), } consistency_schemata = ( AllOrNone(("time_start", "time_end", "discharge_transient")), NotBoth("time_start", "timeseries_id"), ) + timeseries_schemata = { + "timeseries_id": AllRequired(), + "time_start": AllRequired(Positive()), + "discharge": AllRequired(), + } class Well(TransientElement): diff --git a/plugin/qgistim/core/extractor.py 
b/plugin/qgistim/core/extractor.py index 65909ba..3cfd0a9 100644 --- a/plugin/qgistim/core/extractor.py +++ b/plugin/qgistim/core/extractor.py @@ -21,7 +21,8 @@ def extract_coordinates(feature): coordinates = [] for vertex in geometry.vertices(): coordinates.append((vertex.x(), vertex.y())) - return coordinates + centroid = geometry.centroid().asPoint() + return (centroid.x(), centroid.y()), coordinates @classmethod def to_dict(cls, layer) -> List[Dict[str, Any]]: @@ -31,14 +32,13 @@ def to_dict(cls, layer) -> List[Dict[str, Any]]: for key, value in data.items(): if value == NULL: data[key] = None - data["geometry"] = cls.extract_coordinates(feature) + data["centroid"], data["geometry"] = cls.extract_coordinates(feature) features.append(data) return features def table_to_dict(cls, layer) -> Dict[str, Any]: features = defaultdict(list) for feature in layer.getFeatures(): - features["geometry"].append(cls.extract_coordinates(feature)) for key, value in feature.attributeMap().items(): if value == NULL: features[key].append(None) diff --git a/plugin/qgistim/core/formatting.py b/plugin/qgistim/core/formatting.py index b98fb7b..a0f0b80 100644 --- a/plugin/qgistim/core/formatting.py +++ b/plugin/qgistim/core/formatting.py @@ -155,6 +155,21 @@ def to_script_string(data: Dict[str, Any]) -> str: def to_json(data: Dict[str, Any]) -> Dict[str, Any]: + """ + Take the data and add: + + * the TimML type + * the layer name + + Parameters + ---------- + data: Dict[str, Any] + + Returns + ------- + json_data: Dict[str, Any] + Data ready to dump to JSON. 
+ """ data = data.copy() # avoid side-effects aquifer_data = data.pop("timml Aquifer:Aquifer") domain_data = data.pop("timml Domain:Domain") diff --git a/plugin/qgistim/core/schemata.py b/plugin/qgistim/core/schemata.py index ad9ddf6..e0ce5c4 100644 --- a/plugin/qgistim/core/schemata.py +++ b/plugin/qgistim/core/schemata.py @@ -13,15 +13,14 @@ MaybeError = Union[None, str] -MaybeErrorList = Union[None, List[str]] +ErrorList = List[str] def format(data) -> str: return ", ".join(map(str, data)) -def discard_none(data): - return [v for v in data if v is not None] +# Base classes class BaseSchema(abc.ABC): @@ -34,85 +33,106 @@ def __init__(self): def validate(self, data, other) -> MaybeError: pass - def validate_many(self, data, other) -> MaybeErrorList: + def validate_many(self, data, other) -> ErrorList: errors = [] for value in data: error = self.validate(value, other) if error is not None: errors.append(error) - if errors: - return errors - else: - return None + return errors -class IterableSchema(BaseSchema, abc.ABC): +class IterableSchema(abc.ABC): """Base class for collection of values.""" - def validate_many(self, data, other) -> MaybeErrorList: + def __init__(self, *schemata): + self.schemata = schemata + + def validate_many(self, data, other) -> ErrorList: error = self.validate(data, other) if error: - if isinstance(error, list): - return error - else: - return [error] + return [error] else: - return None + return [] -class SchemaContainer(IterableSchema): +class SchemaContainer(abc.ABC): def __init__(self, *schemata): self.schemata = schemata - def _validate_schemata(self, data, other=None) -> MaybeErrorList: + @abc.abstractmethod + def validate(self): + pass + + def _validate_schemata(self, data, other=None) -> ErrorList: errors = [] for schema in self.schemata: - if isinstance(data, list): - _errors = schema.validate_many(data, other) - if _errors: - errors.extend(_errors) - else: - _error = schema.validate(data, other) - if _error: - errors.append(_error) 
- - if errors: - return errors + _error = schema.validate(data, other) + if _error: + errors.append(_error) + return errors - return None +class IterableSchemaContainer(abc.ABC): + def __init__(self, *schemata): + self.schemata = schemata + + @abc.abstractmethod + def validate(self): + pass -# SchemataContainer for a single value. + def _validate_schemata(self, data, other=None) -> ErrorList: + errors = [] + for schema in self.schemata: + _errors = schema.validate_many(data, other) + if _errors: + errors.extend(_errors) + return errors + + +# Schema containers for a single value. + + +class OptionalFirstOnly(SchemaContainer): + """Exclusively the first value must be provided.""" + + def validate(self, data, other=None) -> ErrorList: + if any(v is not None for v in data[1:]): + return ["Only the first value may be filled in."] + elif data[0] is None: + return [] + else: + return self._validate_schemata(data[0], other) class Optional(SchemaContainer): - def validate(self, data, other=None) -> MaybeError: + def validate(self, data, other=None) -> ErrorList: if data is None: - return None + return [] return self._validate_schemata(data, other) class Required(SchemaContainer): - def validate(self, data, other=None) -> MaybeError: + def validate(self, data, other=None) -> ErrorList: if data is None: - return "a value is required." + return ["a value is required."] return self._validate_schemata(data, other) # SchemataContainer for multiple values. 
-class AllRequired(SchemaContainer): - def validate(self, data, other=None) -> MaybeErrorList: +class AllRequired(IterableSchemaContainer): + def validate(self, data, other=None) -> ErrorList: missing = [i + 1 for i, v in enumerate(data) if v is None] if missing: return [f"No values provided at row(s): {format(missing)}"] return self._validate_schemata(data, other) -class OffsetAllRequired(SchemaContainer): - def validate(self, data, other=None) -> MaybeErrorList: +class OffsetAllRequired(IterableSchemaContainer): + def validate(self, data, other=None) -> ErrorList: missing = [i + 2 for i, v in enumerate(data[1:]) if v is None] if missing: return [f"No values provided at row(s): {format(missing)}"] @@ -122,11 +142,11 @@ def validate(self, data, other=None) -> MaybeErrorList: return self._validate_schemata(data, other) -class AllOptional(SchemaContainer): - def validate(self, data, other=None) -> MaybeErrorList: +class AllOptional(IterableSchemaContainer): + def validate(self, data, other=None) -> ErrorList: missing = [i + 1 for i, v in enumerate(data) if v is None] if len(missing) == len(data): - return None + return [] return self._validate_schemata(data, other) @@ -140,15 +160,6 @@ def validate(self, data, _=None) -> MaybeError: return None -class Time(BaseSchema): - def validate(self, data, other=None) -> MaybeError: - start = other["start"] - end = other["end"] - if not (start < data < end): - return f"time does not fall in model time window: {start} to {end}" - return None - - class AllOrNone(BaseSchema): def __init__(self, *variables: Sequence[str]): self.variables = variables @@ -164,19 +175,6 @@ def validate(self, data, _=None) -> MaybeError: return None -class Xor(BaseSchema): - "One or the other should be provided" - - def __init__(self, x: str, y: str): - self.x = x - self.y = y - - def validate(self, data, _=None) -> MaybeError: - if not ((data[self.x] is None) ^ (data[self.y] is None)): - return f"Either {self.x} or {self.y} should be provided, not 
both." - return None - - class NotBoth(BaseSchema): def __init__(self, x: str, y: str): self.x = x @@ -203,6 +201,26 @@ def validate(self, data, other=None) -> MaybeError: return None +class CircularGeometry(BaseSchema): + def validate(self, data, _=None) -> MaybeError: + coordinates = data + # Compute centroid. + n_vertex = len(data) + x_mean = 0.0 + y_mean = 0.0 + for x, y in coordinates: + x_mean += x / n_vertex + y_mean += y / n_vertex + # Compute distances to centroid. + distances = [(x - x_mean) ** 2 + (y - y_mean) ** 2 for (x, y) in coordinates] + min_distance = min(distances) ** 0.5 + max_distance = max(distances) ** 0.5 + # Accept 1% deviation in squared distance from a circle. + if (max_distance - min_distance) > (0.01 * min_distance): + return "Geometry is not circular." + return None + + # Schemata for a collection of values. @@ -260,19 +278,10 @@ def validate(self, data, _=None) -> MaybeError: return None -class FirstOnly(SchemaContainer): - """Exclusively the first value must be provided.""" - - def validate(self, data, other=None) -> Union[MaybeError, MaybeErrorList]: - if any(v is not None for v in data[1:]): - return "Only the first value may be filled in." - return self._validate_schemata(data[0], other) - - # Consistency schemata -class SemiConfined(IterableSchema): +class SemiConfined(BaseSchema): def validate(self, data, _=None) -> MaybeError: semiconf_data = { "aquitard_c": data["aquitard_c"][0], @@ -294,7 +303,7 @@ def validate(self, data, _=None) -> MaybeError: return None -class SingleRow(IterableSchema): +class SingleRow(BaseSchema): def validate(self, data, _=None) -> MaybeError: if len(data) != 1: return "Table may contain only one row." 
diff --git a/test_plugin/test_aquifer_schemata.py b/test_plugin/test_aquifer_schemata.py index 44fddb1..7691664 100644 --- a/test_plugin/test_aquifer_schemata.py +++ b/test_plugin/test_aquifer_schemata.py @@ -15,12 +15,12 @@ def test_validate(self): "semiconf_top": [None], "semiconf_head": [None], } - self.assertIsNone(schema.validate_timml(data)) + self.assertEqual(schema.validate_timml(data), {}) def test_validate_empty(self): schema = AquiferSchema() data = {} - self.assertEqual(schema.validate_timml(data), {"table": ["Table is empty."]}) + self.assertEqual(schema.validate_timml(data), {"Table:": ["Table is empty."]}) def test_validate_two_layer(self): schema = AquiferSchema() @@ -33,7 +33,7 @@ def test_validate_two_layer(self): "semiconf_top": [None], "semiconf_head": [None], } - self.assertIsNone(schema.validate_timml(data)) + self.assertEqual(schema.validate_timml(data), {}) def test_validate_two_layer_invalid(self): schema = AquiferSchema() @@ -61,7 +61,7 @@ def test_validate_two_layer_consistency(self): "semiconf_head": [None], } expected = { - "table": [ + "Table:": [ "aquifer_top is not greater or equal to aquifer_bottom at row(s): 1, 2" ] } diff --git a/test_plugin/test_schemata.py b/test_plugin/test_schemata.py index 969a7fa..cc597e6 100644 --- a/test_plugin/test_schemata.py +++ b/test_plugin/test_schemata.py @@ -1,18 +1,17 @@ from unittest import TestCase -from qgistim.core import schemata from qgistim.core.schemata import ( AllGreaterEqual, AllOptional, AllOrNone, AllRequired, Decreasing, - FirstOnly, Increasing, Membership, NotBoth, OffsetAllRequired, Optional, + OptionalFirstOnly, Positive, Range, Required, @@ -23,15 +22,6 @@ ) -class TestDiscardNone(TestCase): - def test_discard(self): - actual = schemata.discard_none([1, 2]) - self.assertTrue(actual == [1, 2]) - - actual = schemata.discard_none([None, 1, None, 2, None]) - self.assertTrue(actual == [1, 2]) - - class TestPositive(TestCase): def test_positive(self): 
self.assertEqual(Positive().validate(-1), "Non-positive value: -1") @@ -41,18 +31,18 @@ def test_positive(self): class TestOptional(TestCase): def test_optional(self): - self.assertIsNone(Optional(Positive()).validate(None)) - self.assertIsNone(Optional(Positive()).validate(0)) - self.assertIsNone(Optional(Positive()).validate(1)) + self.assertEqual(Optional(Positive()).validate(None), []) + self.assertEqual(Optional(Positive()).validate(0), []) + self.assertEqual(Optional(Positive()).validate(1), []) self.assertEqual(Optional(Positive()).validate(-1), ["Non-positive value: -1"]) class TestRequired(TestCase): def test_required(self): - self.assertEqual(Required(Positive()).validate(None), "a value is required.") + self.assertEqual(Required(Positive()).validate(None), ["a value is required."]) self.assertEqual(Required(Positive()).validate(-1), ["Non-positive value: -1"]) - self.assertIsNone(Required(Positive()).validate(0)) - self.assertIsNone(Required(Positive()).validate(1)) + self.assertEqual(Required(Positive()).validate(0), []) + self.assertEqual(Required(Positive()).validate(1), []) class TestAllOrNone(TestCase): @@ -98,7 +88,7 @@ def test_membership(self): class TestAllRequired(TestCase): def test_all_required(self): schema = AllRequired(Positive()) - self.assertIsNone(schema.validate([1, 2, 3])) + self.assertEqual(schema.validate([1, 2, 3]), []) self.assertEqual( schema.validate([None, 2, None]), ["No values provided at row(s): 1, 3"] ) @@ -108,7 +98,7 @@ def test_all_required(self): class TestOffsetAllRequired(TestCase): def test_offset_all_required(self): schema = OffsetAllRequired(Positive()) - self.assertIsNone(schema.validate([None, 2, 3])) + self.assertEqual(schema.validate([None, 2, 3]), []) self.assertEqual( schema.validate([None, 2, None]), ["No values provided at row(s): 3"] ) @@ -118,22 +108,22 @@ def test_offset_all_required(self): class TestAllOptional(TestCase): def test_all_optional(self): schema = AllOptional(Positive()) - 
self.assertIsNone(schema.validate([None, None, None])) - self.assertIsNone(schema.validate([1, 2, 3])) + self.assertEqual(schema.validate([None, None, None]), []) + self.assertEqual(schema.validate([1, 2, 3]), []) self.assertEqual(schema.validate([-1, 2, 3]), ["Non-positive value: -1"]) def test_all_optional_first_only(self): - schema = AllOptional(FirstOnly()) - self.assertIsNone(schema.validate([None, None, None])) - self.assertIsNone(schema.validate([1, None, None])) + schema = OptionalFirstOnly() + self.assertEqual(schema.validate([None, None, None]), []) + self.assertEqual(schema.validate([1, None, None]), []) self.assertEqual( schema.validate([1, 1, None]), ["Only the first value may be filled in."] ) def test_all_optional_first_only_positive(self): - schema = AllOptional(FirstOnly(Positive())) - self.assertIsNone(schema.validate([None, None, None])) - self.assertIsNone(schema.validate([1, None, None])) + schema = OptionalFirstOnly(Positive()) + self.assertEqual(schema.validate([None, None, None]), []) + self.assertEqual(schema.validate([1, None, None]), []) self.assertEqual( schema.validate([1, 1, None]), ["Only the first value may be filled in."] ) @@ -209,13 +199,13 @@ def test_all_greater_equal(self): self.assertEqual(schema.validate(d), expected) -class TestFirstOnly(TestCase): +class TestOptionalFirstOnly(TestCase): def test_first_only(self): - schema = FirstOnly() - self.assertIsNone(schema.validate([None, None])) - self.assertIsNone(schema.validate([1, None])) + schema = OptionalFirstOnly() + self.assertEqual(schema.validate([None, None]), []) + self.assertEqual(schema.validate([1, None]), []) self.assertEqual( - schema.validate([1, 1]), "Only the first value may be filled in." 
+ schema.validate([1, 1]), ["Only the first value may be filled in."] ) From 5aadd0b13fa78cd0e162baef6871f2b726129167 Mon Sep 17 00:00:00 2001 From: Huite Bootsma Date: Sun, 17 Sep 2023 17:43:17 +0200 Subject: [PATCH 09/41] schemata type hints --- plugin/qgistim/core/elements/element.py | 15 ++++++++++----- plugin/qgistim/core/schemata.py | 8 ++++++-- 2 files changed, 16 insertions(+), 7 deletions(-) diff --git a/plugin/qgistim/core/elements/element.py b/plugin/qgistim/core/elements/element.py index 84a2008..9fdac0a 100644 --- a/plugin/qgistim/core/elements/element.py +++ b/plugin/qgistim/core/elements/element.py @@ -72,14 +72,19 @@ ) from qgistim.core import geopackage from qgistim.core.extractor import ExtractorMixin +from qgistim.core.schemata import ( + ConsistencySchema, + IterableSchemaContainer, + SchemaContainer, +) class ElementSchema(abc.ABC): - timml_schemata: Dict[str, Any] = {} - timml_consistency_schemata: Tuple[Any] = () - ttim_schemata: Dict[str, Any] = {} - ttim_consistency_schemata: Tuple[Any] = () - timeseries_schemata: Dict[str, Any] = {} + timml_schemata: Dict[str, SchemaContainer | IterableSchemaContainer] = {} + timml_consistency_schemata: Tuple[ConsistencySchema] = () + ttim_schemata: Dict[str, SchemaContainer | IterableSchemaContainer] = {} + ttim_consistency_schemata: Tuple[ConsistencySchema] = () + timeseries_schemata: Dict[str, SchemaContainer | IterableSchemaContainer] = {} @staticmethod def _validate_rows(schemata, consistency_schemata, data, other=None): diff --git a/plugin/qgistim/core/schemata.py b/plugin/qgistim/core/schemata.py index e0ce5c4..5ff6a81 100644 --- a/plugin/qgistim/core/schemata.py +++ b/plugin/qgistim/core/schemata.py @@ -43,6 +43,10 @@ def validate_many(self, data, other) -> ErrorList: return errors +class ConsistencySchema(BaseSchema, abc.ABC): + pass + + class IterableSchema(abc.ABC): """Base class for collection of values.""" @@ -281,7 +285,7 @@ def validate(self, data, _=None) -> MaybeError: # Consistency 
schemata -class SemiConfined(BaseSchema): +class SemiConfined(ConsistencySchema): def validate(self, data, _=None) -> MaybeError: semiconf_data = { "aquitard_c": data["aquitard_c"][0], @@ -303,7 +307,7 @@ def validate(self, data, _=None) -> MaybeError: return None -class SingleRow(BaseSchema): +class SingleRow(ConsistencySchema): def validate(self, data, _=None) -> MaybeError: if len(data) != 1: return "Table may contain only one row." From c495a8996bdeb02d2ebcf61a53f17d58bd3d4e87 Mon Sep 17 00:00:00 2001 From: Huite Bootsma Date: Sun, 17 Sep 2023 18:09:51 +0200 Subject: [PATCH 10/41] start with polygon inhom and area sink --- plugin/qgistim/core/elements/element.py | 26 +++++++++-- .../core/elements/polygon_area_sink.py | 46 +++++++++++++++++++ .../core/elements/polygon_inhomogeneity.py | 33 +++++-------- plugin/qgistim/widgets/dataset_widget.py | 4 +- 4 files changed, 82 insertions(+), 27 deletions(-) diff --git a/plugin/qgistim/core/elements/element.py b/plugin/qgistim/core/elements/element.py index 9fdac0a..fa4db13 100644 --- a/plugin/qgistim/core/elements/element.py +++ b/plugin/qgistim/core/elements/element.py @@ -80,6 +80,7 @@ class ElementSchema(abc.ABC): + # TODO: check for presence of columns timml_schemata: Dict[str, SchemaContainer | IterableSchemaContainer] = {} timml_consistency_schemata: Tuple[ConsistencySchema] = () ttim_schemata: Dict[str, SchemaContainer | IterableSchemaContainer] = {} @@ -375,8 +376,6 @@ def interleave(a, b): if "resistance" in data: aquifer["res"] = data["resistance"][0] - if "rate" in data: - aquifer["N"] = data["rate"][0] return aquifer @@ -462,12 +461,12 @@ def to_ttim(self, other=None): timml_errors, data = self.to_timml(other) timeseries = self.table_to_dict(self.ttim_layer) timeseries_errors = self.schema.validate_timeseries(timeseries) - errors = timml_errors + timeseries_errors + errors = {**timml_errors, **timeseries_errors} if errors: return errors, None other = other.copy() - other["timeseries_ids"] = 
[row["timeseries_id"] for row in timeseries] + other["timeseries_ids"] = set([row["timeseries_id"] for row in timeseries]) errors = self.schema.validate_ttim(data, other) if errors: return errors, None @@ -514,3 +513,22 @@ def write(self): def remove_from_geopackage(self): geopackage.remove_layer(self.path, self.timml_name) geopackage.remove_layer(self.path, self.assoc_name) + + def to_timml(self, other): + properties = self.table_to_dict(self.assoc_layer) + errors = self.assoc_schema.validate_timml(properties) + if errors: + return errors, None + + other = other.copy() + other["inhomogeneity_ids"] = set([row["timeseries_id"] for row in properties]) + errors, data = super().to_timml(other) + if errors: + return errors, None + + grouped = self.groupby(properties, "inhomogeneity_id") + elements = [self.process_timml_row(row, grouped) for row in data] + return None, elements + + def to_ttim(self, _): + raise NotImplementedError(f"{type(self).__name__} is not supported in TTim.") diff --git a/plugin/qgistim/core/elements/polygon_area_sink.py b/plugin/qgistim/core/elements/polygon_area_sink.py index e69de29..6fe08dd 100644 --- a/plugin/qgistim/core/elements/polygon_area_sink.py +++ b/plugin/qgistim/core/elements/polygon_area_sink.py @@ -0,0 +1,46 @@ +from PyQt5.QtCore import QVariant +from qgis.core import QgsDefaultValue, QgsField, QgsSingleSymbolRenderer +from qgistim.core.elements.colors import GREEN, TRANSPARENT_GREEN +from qgistim.core.elements.element import Element, ElementSchema +from qgistim.core.schemata import ( + Positive, + Required, +) + + +class PolygonAreaSinkSchema(ElementSchema): + timml_schemata = { + "rate": Required(), + "order": Required(Positive()), + "ndegrees": Required(Positive()), + } + + +class PolygonAreaSink(Element): + element_type = "Polygon Area Sink" + geometry_type = "Polygon" + timml_attributes = ( + QgsField("rate", QVariant.Double), + QgsField("order", QVariant.Int), + QgsField("ndegrees", QVariant.Int), + ) + timml_defaults = { + 
"order": QgsDefaultValue("4"), + "ndegrees": QgsDefaultValue("6"), + } + schema = PolygonAreaSinkSchema() + + @property + def renderer(self) -> QgsSingleSymbolRenderer: + return self.polygon_renderer( + color=TRANSPARENT_GREEN, color_border=GREEN, width_border="0.75" + ) + + def process_timml_row(self, row, other): + return { + "xy": self._polygon_xy(row), + "rate": row["rate"], + "order": row["order"], + "ndeg": row["ndegrees"], + **other["global_aquifer"], + } diff --git a/plugin/qgistim/core/elements/polygon_inhomogeneity.py b/plugin/qgistim/core/elements/polygon_inhomogeneity.py index b2facdd..349d560 100644 --- a/plugin/qgistim/core/elements/polygon_inhomogeneity.py +++ b/plugin/qgistim/core/elements/polygon_inhomogeneity.py @@ -1,3 +1,5 @@ +from typing import Any, Dict + from PyQt5.QtCore import QVariant from qgis.core import QgsDefaultValue, QgsField, QgsSingleSymbolRenderer from qgistim.core.elements.colors import GREY, TRANSPARENT_GREY @@ -34,6 +36,7 @@ class AssociatedPolygonInhomogeneitySchema(ElementSchema): "aquifer_k": AllRequired(Positive()), "semiconf_top": OptionalFirstOnly(), "semiconf_head": OptionalFirstOnly(), + "rate": OptionalFirstOnly(), } timml_consistency_schemata = ( SemiConfined(), @@ -41,9 +44,6 @@ class AssociatedPolygonInhomogeneitySchema(ElementSchema): ) -# TODO: each group should have equal length - - class PolygonInhomogeneity(AssociatedElement): element_type = "Polygon Inhomogeneity" geometry_type = "Polygon" @@ -101,22 +101,11 @@ def on_transient_changed(self, transient: bool): self.assoc_layer.setAttributeTableConfig(config) return - def to_timml(self): - data = self.to_dict(self.timml_layer) - assoc_data = self.table_to_dict(self.assoc_layer) - timml_errors = self.schema.validate_timml(data) - assoc_errors = self.assoc_schema.validate_timml(assoc_data) - errors = {**timml_errors, **assoc_errors} - if errors: - return errors, None - else: - grouped = self.groupby(assoc_data, "inhomogeneity_id") - inhoms = [] - for row in data: 
- inhom_id = row["inhomogeneity_id"] - kwargs = self._aquifer_data(grouped[inhom_id]) - kwargs["xy"] = self._polygon_xy(row) - kwargs["order"] = row["order"] - kwargs["ndeg"] = row["ndegrees"] - inhoms.append(kwargs) - return inhoms + def process_timml_row(self, row: Dict[str, Any], grouped: Dict[int, Any]): + inhom_id = row["inhomogeneity_id"] + return { + "xy": self._polygon_xy(row), + "order": row["order"], + "ndeg": row["ndegrees"], + **self._aquifer_data(grouped[inhom_id]), + } diff --git a/plugin/qgistim/widgets/dataset_widget.py b/plugin/qgistim/widgets/dataset_widget.py index 5127547..b1ec38e 100644 --- a/plugin/qgistim/widgets/dataset_widget.py +++ b/plugin/qgistim/widgets/dataset_widget.py @@ -216,7 +216,9 @@ def convert_to_timml(self): if _errors: errors[name] = _errors else: - data[name] = aquifer.aquifer_data(raw_data, transient=False) + aquifer_data = aquifer.aquifer_data(raw_data, transient=False) + data[name] = aquifer_data + other["global_aquifer"] = aquifer_data for name, element in elements.items(): _errors, _data = element.to_timml(other) From 0a260e7a5d3cd07d97c9d9e5a8a5d5da3f1384cc Mon Sep 17 00:00:00 2001 From: Huite Bootsma Date: Mon, 18 Sep 2023 15:22:15 +0200 Subject: [PATCH 11/41] All elements Start removing gistim logic --- gistim/__init__.py | 52 --- gistim/__main__.py | 19 +- gistim/common.py | 383 ------------------ gistim/compute.py | 107 ++++- plugin/qgistim/core/elements/__init__.py | 17 +- plugin/qgistim/core/elements/building_pit.py | 96 +++++ .../core/elements/circular_area_sink.py | 2 +- plugin/qgistim/core/elements/constant.py | 2 +- plugin/qgistim/core/elements/element.py | 41 +- .../qgistim/core/elements/head_line_sink.py | 2 +- plugin/qgistim/core/elements/headwell.py | 2 +- .../core/elements/impermeable_line_doublet.py | 2 +- .../core/elements/leaky_building_pit.py | 103 +++++ .../core/elements/leaky_line_doublet.py | 2 +- .../qgistim/core/elements/line_sink_ditch.py | 2 +- plugin/qgistim/core/elements/observation.py | 2 
+- .../core/elements/polygon_area_sink.py | 18 +- .../core/elements/polygon_inhomogeneity.py | 32 +- .../elements/polygon_semi_confined_top.py | 53 +++ plugin/qgistim/core/elements/uniform_flow.py | 2 +- plugin/qgistim/core/elements/well.py | 2 +- plugin/qgistim/core/formatting.py | 4 +- plugin/qgistim/core/schemata.py | 19 +- plugin/qgistim/widgets/compute_widget.py | 20 +- plugin/qgistim/widgets/dataset_widget.py | 112 ++--- plugin/qgistim/widgets/elements_widget.py | 9 +- 26 files changed, 524 insertions(+), 581 deletions(-) delete mode 100644 gistim/common.py create mode 100644 plugin/qgistim/core/elements/polygon_semi_confined_top.py diff --git a/gistim/__init__.py b/gistim/__init__.py index 83d8b71..5bc16c3 100644 --- a/gistim/__init__.py +++ b/gistim/__init__.py @@ -4,7 +4,6 @@ import pkg_resources from gistim import timml_elements, ttim_elements -from gistim.common import gridspec, model_specification from gistim.compute import compute_steady, compute_transient from gistim.data_extraction import as_aquifer_aquitard, layer_statistics from gistim.ugrid import to_ugrid2d @@ -15,54 +14,3 @@ except pkg_resources.DistributionNotFound: # package is not installed pass - - -def convert_to_script(inpath: str, outpath: str) -> None: - timml_spec, ttim_spec = model_specification(inpath, {}) - timml_script = timml_elements.convert_to_script(timml_spec) - try: - ttim_script = ttim_elements.convert_to_script(ttim_spec) - except Exception: - ttim_script = "" - - with open(outpath, "w") as f: - f.write(timml_script) - f.write("\n") - f.write(ttim_script) - - -def compute( - inpath: Union[pathlib.Path, str], - outpath: Union[pathlib.Path, str], - mode: str, - cellsize: float, - active_elements: Dict[str, bool], -) -> None: - """ - Compute the results of TimML model. - - The model is fully specified by the GeoPacakge dataset in the path. - - The extent of the head grids is read from a vector layer in the - GeoPackage file. 
- - Parameters - ---------- - path: Union[pathlib.Path, str] - Path to the GeoPackage file containing the full model input. - cellsize: float - Grid cell size of the computed output - - Returns - ------- - None - The result is written to a netCDF file. Its name is generated from - the geopackage name, and the requested grid cell size. - """ - if mode == "steady-state": - compute_steady(inpath, outpath, cellsize, active_elements) - elif mode == "transient": - compute_transient(inpath, outpath, cellsize, active_elements) - else: - raise ValueError(f'Invalid mode: {mode}: should be "steady" or "transient".') - return diff --git a/gistim/__main__.py b/gistim/__main__.py index 9fc1646..a52f2c5 100644 --- a/gistim/__main__.py +++ b/gistim/__main__.py @@ -163,13 +163,18 @@ def compute(args) -> None: with open(jsonpath, "r") as f: data = json.loads(f.read()) - gistim.compute( - inpath=data["inpath"], - outpath=data["outpath"], - cellsize=data["cellsize"], - mode=data["mode"], - active_elements=data["active_elements"], - ) + if data["mode"] == "steady-state": + gistim.compute_steady( + inpath=data["inpath"], + outpath=data["outpath"], + ) + elif data["mode"] == "transient": + gistim.compute_transient( + inpath=data["inpath"], + outpath=data["outpath"], + ) + else: + raise ValueError("Mode should be transient or steady-state.") return diff --git a/gistim/common.py b/gistim/common.py deleted file mode 100644 index dadd633..0000000 --- a/gistim/common.py +++ /dev/null @@ -1,383 +0,0 @@ -""" -Common utilities used by conversions from GeoPackage (GPKG) layers to TimML and -TTim elements. - -The relation between a GPKG layer and the elements is not perfect. A GPGK layer -consists of a table, with optionally an associated geometry for every row. This -matches one to one for elements such as HeadLineSinkStrings, Wells, etc: for -these elements, one row equals one element. - -For elements such as PolygonImhomogenities, this is not the case. 
Every geometry -(a polygon) requires a table of its own. These tables are stored in associated -tables; their association is by name. - -For transient (TTim) elements, the same is true: elements require an additional -table for their timeseries data, which should require repeating the geometry -for every time step. In this package, we assume that any ttim element is -accompanied by a timml element; the QGIS plugin always sets up the layer ih -that manner. - -When processing a GPKG, we first parse the names, and group the different -tables together in the ElementSpecifications below. The ``timml_elements`` and -``ttim_elements`` then convert these grouped tables into ``timml`` and ``ttim`` -models. -""" -import numbers -import pathlib -import re -from collections import defaultdict -from functools import partial -from typing import Any, Dict, NamedTuple, Tuple, Union - -import fiona -import geopandas as gpd -import numpy as np -import pandas as pd - -FloatArray = np.ndarray - - -def filter_scalar_nan(value: Any) -> Any: - if isinstance(value, numbers.Real) and np.isnan(value): - return None - else: - return value - - -def filter_nan(row: Union[Dict, pd.Series]): - """ - Newer versions of geopandas return NaN rather than None for columns. - TimML & TTim expect None for optional values. 
- """ - return {k: filter_scalar_nan(v) for k, v in row.items()} - - -class ElementSpecification(NamedTuple): - elementtype: str - active: bool - dataframe: gpd.GeoDataFrame - associated_dataframe: gpd.GeoDataFrame - - -class TransientElementSpecification(NamedTuple): - elementtype: str - active: bool - dataframe: gpd.GeoDataFrame - steady_spec: ElementSpecification - - -class TimmlModelSpecification(NamedTuple): - aquifer: gpd.GeoDataFrame - elements: Dict[str, ElementSpecification] - domain: gpd.GeoDataFrame - - -class TtimModelSpecification(NamedTuple): - aquifer: gpd.GeoDataFrame - temporal_settings: gpd.GeoDataFrame - elements: Dict[str, ElementSpecification] - domain: gpd.GeoDataFrame - output_times: FloatArray - - -# Extract coordinates from geodataframe -# ------------------------------------- -def point_coordinates(dataframe) -> Tuple[FloatArray, FloatArray]: - return dataframe["geometry"].x, dataframe["geometry"].y - - -def remove_zero_length(coords: FloatArray): - dx_dy = np.diff(coords, axis=0) - notzero = (dx_dy != 0).any(axis=1) - keep = np.full(len(coords), True) - keep[1:] = notzero - return coords[keep] - - -def linestring_coordinates(row) -> FloatArray: - return remove_zero_length(np.array(row["geometry"].coords)) - - -def polygon_coordinates(row) -> FloatArray: - return remove_zero_length(np.array(row["geometry"].exterior.coords)) - - -# Parse GPKG content to Tim input -# ------------------------------- -def aquifer_data( - dataframe: gpd.GeoDataFrame, transient: bool = False -) -> Dict[str, Any]: - """ - Convert a table created by the QGIS plugin to the layer configuration and - keywords arguments as expected by TimML or TTim. - """ - # Make sure the layers are in the right order. - dataframe = dataframe.sort_values(by="layer").set_index("layer") - nlayer = len(dataframe) - # Deal with optional semi-confined top layer. 
- hstar = dataframe.loc[0, "semiconf_head"] - semi = pd.notnull(hstar) - kaq = dataframe["aquifer_k"].values - - if semi: - c = dataframe["aquitard_c"].values - porosity = np.empty(nlayer * 2) - z = np.empty(nlayer * 2 + 1) - z[0] = dataframe.loc[0, "semiconf_top"] - z[1::2] = dataframe["aquifer_top"].values - z[2::2] = dataframe["aquifer_bottom"].values - porosity[::2] = dataframe["aquitard_npor"].values - porosity[1::2] = dataframe["aquifer_npor"].values - topboundary = "semi" - storage_aquifer = dataframe["aquifer_s"].values - storage_aquitard = dataframe["aquitard_s"].values - else: - c = dataframe["aquitard_c"].values[1:] - z = np.empty(nlayer * 2) - z[::2] = dataframe["aquifer_top"].values - z[1::2] = dataframe["aquifer_bottom"].values - porosity = np.empty(nlayer * 2 - 1) - porosity[::2] = dataframe["aquifer_npor"].values - porosity[1::2] = dataframe["aquitard_npor"].values[1:] - topboundary = "conf" - storage_aquifer = dataframe["aquifer_s"].values - storage_aquitard = dataframe["aquifer_s"].values[1:] - - d = { - "kaq": kaq, - "z": z, - "c": c, - "topboundary": topboundary, - } - if transient: - d["Sll"] = storage_aquitard - d["Saq"] = storage_aquifer - # TODO: for now, assume there is always at least one specific yield - # this is the aquifer if conf, the aquitard if semi - d["phreatictop"] = True - else: - d["npor"] = porosity - d["hstar"] = hstar - - # For inhomogeneities - if "rate" in dataframe: - d["N"] = dataframe.loc[0, "rate"] - - # For leaky building pit - if "resistance" in dataframe: - d["res"] = dataframe.loc[0, "resistance"] - - filtered = {} - for k, value in d.items(): - if isinstance(value, np.ndarray): - filtered[k] = [filter_scalar_nan(v) for v in value] - else: - filtered[k] = filter_scalar_nan(value) - - return filtered - - -def parse_name(layername: str) -> Tuple[str, str, str]: - """ - Based on the layer name find out: - - * whether it's a timml or ttim element; - * which element type it is; - * what the user provided name is. 
- - For example: - parse_name("timml Headwell: drainage") -> ("timml", "Head Well", "drainage") - - Some grouping of tables occurs here. - """ - prefix, name = layername.split(":") - element_type = re.split("timml |ttim ", prefix)[1] - mapping = { - "Computation Times": "Domain", - "Temporal Settings": "Aquifer", - "Polygon Inhomogeneity Properties": "Polygon Inhomogeneity", - "Building Pit Properties": "Building Pit", - } - element_type = mapping.get(element_type, element_type) - if "timml" in prefix: - if "Properties" in prefix: - tim_type = "timml_assoc" - else: - tim_type = "timml" - elif "ttim" in prefix: - tim_type = "ttim" - else: - raise ValueError(f"Neither timml nor ttim in layername: {layername}") - return tim_type, element_type, name - - -def model_specification( - path: Union[str, pathlib.Path], active_elements: Dict[str, bool] -) -> Tuple[TimmlModelSpecification, TtimModelSpecification]: - """ - Group the different layers of a GPKG into model specifications for timml - and ttim. The grouping occurs solely on the basis of layer names. - """ - # Start by listing all layers - gpkg_names = fiona.listlayers(path) - # Group all these names together (using a defaultdict) - dd = defaultdict - grouped_names = dd(partial(dd, partial(dd, list))) - for layername in gpkg_names: - tim_type, element_type, name = parse_name(layername) - grouped_names[element_type][name][tim_type] = layername - - # Grab the names of the required elements and load the data into - # geodataframes. 
- aquifer_entry = grouped_names.pop("Aquifer")["Aquifer"] - aquifer = gpd.read_file(path, layer=aquifer_entry["timml"]) - temporal_settings = gpd.read_file(path, layer=aquifer_entry["ttim"]) - domain_entry = grouped_names.pop("Domain")["Domain"] - domain = gpd.read_file(path, layer=domain_entry["timml"]) - - if len(domain.index) == 0: - raise ValueError("Domain not defined") - - output_times = gpd.read_file(path, layer=domain_entry["ttim"]) - - # Load the data all other elements into geodataframes. - ttim_elements = {} - timml_elements = {} - for element_type, element_group in grouped_names.items(): - for name, group in element_group.items(): - timml_name = group["timml"] - timml_assoc_name = group.get("timml_assoc", None) - ttim_name = group.get("ttim", timml_name) - - timml_df = gpd.read_file(path, layer=timml_name) - timml_assoc_df = ( - gpd.read_file(path, layer=timml_assoc_name) - if timml_assoc_name is not None - else None - ) - ttim_df = ( - gpd.read_file(path, layer=ttim_name) if ttim_name is not None else None - ) - - # Use the background aquifer for a semi-confined top - if element_type in ("Polygon Area Sink", "Polygon Semi-Confined Top"): - timml_assoc_df = aquifer - - timml_spec = ElementSpecification( - elementtype=element_type, - active=active_elements.get(timml_name, False), - dataframe=timml_df, - associated_dataframe=timml_assoc_df, - ) - - ttim_spec = TransientElementSpecification( - elementtype=element_type, - active=active_elements.get(ttim_name, False), - dataframe=ttim_df, - steady_spec=timml_spec, - ) - timml_elements[timml_name] = timml_spec - ttim_elements[ttim_name] = ttim_spec - - return ( - TimmlModelSpecification(aquifer, timml_elements, domain), - TtimModelSpecification( - aquifer, - temporal_settings, - ttim_elements, - domain, - np.sort(output_times["time"].values), - ), - ) - - -# Three helpers for conversion to Python scripts -# ---------------------------------------------- -def dict_to_kwargs_code(data: dict) -> str: - strings = 
[] - for key, value in data.items(): - if isinstance(value, np.ndarray): - value = value.tolist() - elif isinstance(value, str) and key not in ("model", "timmlmodel"): - value = f'"{value}"' - strings.append(f"{key}={value}") - return ",".join(strings) - - -def sanitized(name: str) -> str: - return name.split(":")[-1].replace(" ", "_") - - -def headgrid_code(domain: gpd.GeoDataFrame) -> Tuple[str, str]: - xmin, ymin, xmax, ymax = domain.bounds.iloc[0] - dy = (ymax - ymin) / 50.0 - if dy > 500.0: - dy = round(dy / 500.0) * 500.0 - elif dy > 50.0: - dy = round(dy / 50.0) * 50.0 - elif dy > 5.0: # round to five - dy = round(dy / 5.0) * 5.0 - elif dy > 1.0: - dy = round(dy) - (xmin, xmax, ymin, ymax) = round_extent((xmin, xmax, ymin, ymax), dy) - xmin += 0.5 * dy - xmax += 0.5 * dy - ymax -= 0.5 * dy - xmin -= 0.5 * dy - xg = f"np.arange({xmin}, {xmax}, {dy})" - yg = f"np.arange({ymax}, {ymin}, -{dy})" - return xg, yg - - -# Output methods -# -------------- -def round_extent(extent: Tuple[float], cellsize: float) -> Tuple[float]: - """ - Increases the extent until all sides lie on a coordinate - divisible by cellsize. - - Parameters - ---------- - extent: Tuple[float] - xmin, xmax, ymin, ymax - cellsize: float - Desired cell size of the output head grids - - Returns - ------- - extent: Tuple[float] - xmin, xmax, ymin, ymax - """ - xmin, xmax, ymin, ymax = extent - xmin = np.floor(xmin / cellsize) * cellsize - ymin = np.floor(ymin / cellsize) * cellsize - xmax = np.ceil(xmax / cellsize) * cellsize - ymax = np.ceil(ymax / cellsize) * cellsize - return xmin, xmax, ymin, ymax - - -def gridspec( - path: Union[pathlib.Path, str], cellsize: float -) -> Tuple[Tuple[float], Any]: - """ - Infer the grid specification from the geopackage ``timmlDomain`` layer and - the provided cellsize. - - Parameters - ---------- - path: Union[pathlib.Path, str] - Path to the GeoPackage file. 
- cellsize: float - Desired cell size of the output head grids - - Returns - ------- - extent: tuple[float] - xmin, xmax, ymin, ymax - crs: Any - Coordinate Reference System - """ - domain = gpd.read_file(path, layer="timml Domain:Domain") - xmin, ymin, xmax, ymax = domain.bounds.iloc[0] - extent = (xmin, xmax, ymin, ymax) - return round_extent(extent, cellsize), domain.crs diff --git a/gistim/compute.py b/gistim/compute.py index 6f87ca3..3cae047 100644 --- a/gistim/compute.py +++ b/gistim/compute.py @@ -1,12 +1,31 @@ +import json import pathlib from typing import Dict, Union import geopandas as gpd import numpy as np import rioxarray # noqa # pylint: disable=unused-import +import timml +import tqdm +import ttim import xarray as xr -import gistim +from gistim.ugrid import to_ugrid2d + +TIMML_MAPPING = { + "Constant": timml.Constant, + "Uflow": timml.Uflow, + "CircAreaSink": timml.CircAreaSink, + "Well": timml.Well, + "HeadWell": timml.HeadWell, + "PolygonInhom": timml.PolygonInhomMaq, + "HeadLineSinkString": timml.HeadLineSinkString, + "LineSinkDitchString": timml.LineSinkDitchString, + "LeakyLineDoubletString": timml.LeakyLineDoubletString, + "ImpLineDoubletString": timml.ImpLineDoubletString, + "BuildingPit": timml.BuildingPit, + "LeakyBuildingPit": timml.LeakyBuildingPit, +} def write_raster( @@ -24,7 +43,7 @@ def write_ugrid( crs: int, outpath: Union[pathlib.Path, str], ) -> None: - ugrid_head = gistim.to_ugrid2d(head) + ugrid_head = to_ugrid2d(head) ugrid_head["projected_coordinate_system"] = xr.DataArray( data=np.int32(0), attrs={"epsg": np.int32(crs.to_epsg())}, @@ -49,22 +68,92 @@ def write_vector( return +def initialize_timml(data): + aquifer = data.pop("ModelMaq") + timml_model = timml.ModelMaq(**aquifer) + for elementtype, entry in data.items(): + klass = TIMML_MAPPING[elementtype] + for kwargs in entry["data"]: + element = klass(model=timml_model, **kwargs) + return timml_model + + +def timml_head_observations(model: timml.Model, observations: Dict) -> 
gpd.GeoDataFrame: + geodataframes = {} + for name, kwargs_collection in observations.items(): + heads = [] + xx = [] + yy = [] + labels = [] + for kwargs in kwargs_collection: + x = kwargs["x"] + y = kwargs["y"] + heads.append(model.head(x=x, y=y)) + xx.append(x) + yy.append(y) + labels.append(kwargs["label"]) + + d = {"geometry": gpd.points_from_xy(xx, yy), "label": labels} + for i, layerhead in enumerate(np.vstack(heads).T): + d[f"head_layer{i}"] = layerhead + + geodataframes[name] = gpd.GeoDataFrame(d) + + return geodataframes + + +def timml_headgrid(model: timml.Model, xmin, xmax, ymin, ymax, spacing) -> xr.DataArray: + """ + Compute the headgrid of the TimML model, and store the results + in an xarray DataArray with the appropriate dimensions. + + Parameters + ---------- + model: timml.Model + Solved model to get heads from + data: Dict[str, Any] + + Returns + ------- + head: xr.DataArray + DataArray with dimensions ``("layer", "y", "x")``. + """ + x = np.arange(xmin, xmax, spacing) + 0.5 * spacing + # In geospatial rasters, y is DECREASING with row number + y = np.arange(ymax, ymin, -spacing) - 0.5 * spacing + nlayer = model.aq.find_aquifer_data(x[0], y[0]).naq + layer = [i for i in range(nlayer)] + head = np.empty((nlayer, y.size, x.size), dtype=np.float64) + for i in tqdm.tqdm(range(y.size)): + for j in range(x.size): + head[:, i, j] = model.head(x[j], y[i], layer) + + return xr.DataArray( + data=head, + name="head", + coords={"layer": layer, "y": y, "x": x}, + dims=("layer", "y", "x"), + ) + + def compute_steady( inpath: Union[pathlib.Path, str], outpath: Union[pathlib.Path, str], - cellsize: float, - active_elements: Dict[str, bool], ) -> None: inpath = pathlib.Path(inpath) outpath = pathlib.Path(outpath) - timml_spec, _ = gistim.model_specification(inpath, active_elements) - timml_model, _, observations = gistim.timml_elements.initialize_model(timml_spec) + with open(inpath, "r") as f: + data = json.load(f) + + crs = data.pop("crs") + observations = 
data.pop("Observations", None) + headgrid = data.pop("headgrid") + timml_model = initialize_timml(data) timml_model.solve() - extent, crs = gistim.gridspec(inpath, cellsize) - gdfs_obs = gistim.timml_elements.head_observations(timml_model, observations) - head = gistim.timml_elements.headgrid(timml_model, extent, cellsize) + gdfs_obs = timml_head_observations(timml_model, observations) + head = timml_headgrid(timml_model, **headgrid) write_raster(head, crs, outpath) write_ugrid(head, crs, outpath) diff --git a/plugin/qgistim/core/elements/__init__.py b/plugin/qgistim/core/elements/__init__.py index 4ca1520..8ed8dcb 100644 --- a/plugin/qgistim/core/elements/__init__.py +++ b/plugin/qgistim/core/elements/__init__.py @@ -5,6 +5,7 @@ from qgistim.core import geopackage from qgistim.core.elements.aquifer import Aquifer +from qgistim.core.elements.building_pit import BuildingPit from qgistim.core.elements.circular_area_sink import CircularAreaSink from qgistim.core.elements.constant import Constant from qgistim.core.elements.domain import Domain @@ -12,9 +13,13 @@ from qgistim.core.elements.head_line_sink import HeadLineSink from qgistim.core.elements.headwell import HeadWell from qgistim.core.elements.impermeable_line_doublet import ImpermeableLineDoublet +from qgistim.core.elements.leaky_building_pit import LeakyBuildingPit from qgistim.core.elements.leaky_line_doublet import LeakyLineDoublet from qgistim.core.elements.line_sink_ditch import LineSinkDitch from qgistim.core.elements.observation import Observation +from qgistim.core.elements.polygon_area_sink import PolygonAreaSink +from qgistim.core.elements.polygon_inhomogeneity import PolygonInhomogeneity +from qgistim.core.elements.polygon_semi_confined_top import PolygonSemiConfinedTop from qgistim.core.elements.uniform_flow import UniformFlow from qgistim.core.elements.well import Well @@ -32,11 +37,11 @@ CircularAreaSink, ImpermeableLineDoublet, LeakyLineDoublet, - # PolygonAreaSink, - # PolygonSemiConfinedTop, - 
# PolygonInhomogeneity, - # BuildingPit, - # LeakyBuildingPit, + PolygonAreaSink, + PolygonSemiConfinedTop, + PolygonInhomogeneity, + BuildingPit, + LeakyBuildingPit, Observation, ) } @@ -62,7 +67,7 @@ def parse_name(layername: str) -> Tuple[str, str, str]: "Temporal Settings": "Aquifer", "Polygon Inhomogeneity Properties": "Polygon Inhomogeneity", "Building Pit Properties": "Building Pit", - "Leaky Building Pit Properties": "Building Pit", + "Leaky Building Pit Properties": "Leaky Building Pit", } element_type = mapping.get(element_type, element_type) if "timml" in prefix: diff --git a/plugin/qgistim/core/elements/building_pit.py b/plugin/qgistim/core/elements/building_pit.py index e69de29..14a842d 100644 --- a/plugin/qgistim/core/elements/building_pit.py +++ b/plugin/qgistim/core/elements/building_pit.py @@ -0,0 +1,96 @@ +from typing import Any, Dict + +from PyQt5.QtCore import QVariant +from qgis.core import QgsDefaultValue, QgsField, QgsSingleSymbolRenderer +from qgistim.core.elements.colors import RED, TRANSPARENT_RED +from qgistim.core.elements.element import AssociatedElement, ElementSchema +from qgistim.core.schemata import ( + AllGreaterEqual, + AllRequired, + Membership, + OffsetAllRequired, + OptionalFirstOnly, + Positive, + Range, + Required, + SemiConfined, + StrictlyDecreasing, +) + + +class BuildingPitSchema(ElementSchema): + timml_schemata = { + "inhomogeneity_id": Required(Membership("properties inhomogeneity_id")), + "order": Required(Positive()), + "ndegrees": Required(Positive()), + } + + +class AssociatedBuildingPitSchema(ElementSchema): + timml_schemata = { + "inhomogeneity_id": AllRequired(), + "layer": AllRequired(Range()), + "aquifer_top": AllRequired(StrictlyDecreasing()), + "aquifer_bottom": AllRequired(StrictlyDecreasing()), + "aquitard_c": OffsetAllRequired(Positive()), + "aquifer_k": AllRequired(Positive()), + "semiconf_top": OptionalFirstOnly(), + "semiconf_head": OptionalFirstOnly(), + } + timml_consistency_schemata = ( + 
SemiConfined(), + AllGreaterEqual("aquifer_top", "aquifer_bottom"), + ) + + +class BuildingPit(AssociatedElement): + element_type = "Building Pit" + geometry_type = "Polygon" + timml_attributes = ( + QgsField("inhomogeneity_id", QVariant.Int), + QgsField("order", QVariant.Int), + QgsField("ndegrees", QVariant.Int), + ) + assoc_attributes = [ + QgsField("inhomogeneity_id", QVariant.Int), + QgsField("layer", QVariant.Int), + QgsField("aquifer_top", QVariant.Double), + QgsField("aquifer_bottom", QVariant.Double), + QgsField("aquitard_c", QVariant.Double), + QgsField("aquifer_k", QVariant.Double), + QgsField("semiconf_top", QVariant.Double), + QgsField("semiconf_head", QVariant.Double), + QgsField("wall_in_layer", QVariant.Bool), + QgsField("aquitard_npor", QVariant.Double), + QgsField("aquifer_npor", QVariant.Double), + ] + timml_defaults = { + "order": QgsDefaultValue("4"), + "ndegrees": QgsDefaultValue("6"), + "inhomogeneity_id": QgsDefaultValue("1"), + } + assoc_defaults = { + "inhomogeneity_id": QgsDefaultValue("1"), + "wall_in_layer": QgsDefaultValue("false"), + } + schema = BuildingPitSchema() + assoc_schema = AssociatedBuildingPitSchema() + + @property + def renderer(self) -> QgsSingleSymbolRenderer: + return self.polygon_renderer( + color=TRANSPARENT_RED, color_border=RED, width_border="0.75" + ) + + def process_timml_row(self, row: Dict[str, Any], grouped: Dict[int, Any]): + inhom_id = row["inhomogeneity_id"] + raw_data = grouped[inhom_id] + layers = [i for i, active in enumerate(raw_data["wall_in_layer"]) if active] + aquifer_data = self.aquifer_data(raw_data, transient=False) + return { + "xy": self.polygon_xy(row), + "order": row["order"], + "ndeg": row["ndegrees"], + "layers": layers, + **aquifer_data, + } diff --git a/plugin/qgistim/core/elements/circular_area_sink.py b/plugin/qgistim/core/elements/circular_area_sink.py index 9eb1306..486cd60 100644 --- a/plugin/qgistim/core/elements/circular_area_sink.py +++ 
b/plugin/qgistim/core/elements/circular_area_sink.py @@ -69,7 +69,7 @@ def renderer(self): color=TRANSPARENT_GREEN, color_border=GREEN, width_border="0.75" ) - def process_timml_row(self, row) -> Dict[str, Any]: + def process_timml_row(self, row, other=None) -> Dict[str, Any]: # Take the first vertex. x, y = self.point_xy(row) # Compare with the centroid to derive radius. diff --git a/plugin/qgistim/core/elements/constant.py b/plugin/qgistim/core/elements/constant.py index 423993a..2a9ea6f 100644 --- a/plugin/qgistim/core/elements/constant.py +++ b/plugin/qgistim/core/elements/constant.py @@ -28,7 +28,7 @@ class Constant(Element): def renderer(self) -> QgsSingleSymbolRenderer: return self.marker_renderer(color=RED, name="star", size="5") - def process_timml_row(self, row): + def process_timml_row(self, row, other=None): x, y = self.point_xy(row) return { "xr": x, diff --git a/plugin/qgistim/core/elements/element.py b/plugin/qgistim/core/elements/element.py index fa4db13..b07a412 100644 --- a/plugin/qgistim/core/elements/element.py +++ b/plugin/qgistim/core/elements/element.py @@ -53,7 +53,8 @@ import abc from collections import defaultdict -from typing import Any, Dict, List, Tuple +from copy import deepcopy +from typing import Any, Dict, List, Tuple, Union from PyQt5.QtWidgets import ( QDialog, @@ -81,11 +82,11 @@ class ElementSchema(abc.ABC): # TODO: check for presence of columns - timml_schemata: Dict[str, SchemaContainer | IterableSchemaContainer] = {} + timml_schemata: Dict[str, Union[SchemaContainer, IterableSchemaContainer]] = {} timml_consistency_schemata: Tuple[ConsistencySchema] = () - ttim_schemata: Dict[str, SchemaContainer | IterableSchemaContainer] = {} + ttim_schemata: Dict[str, Union[SchemaContainer, IterableSchemaContainer]] = {} ttim_consistency_schemata: Tuple[ConsistencySchema] = () - timeseries_schemata: Dict[str, SchemaContainer | IterableSchemaContainer] = {} + timeseries_schemata: Dict[str, Union[SchemaContainer, IterableSchemaContainer]] = 
{} @staticmethod def _validate_rows(schemata, consistency_schemata, data, other=None): @@ -180,7 +181,7 @@ def __init__(self, parent=None): self.setLayout(layout) -class Element(ExtractorMixin): +class Element(ExtractorMixin, abc.ABC): """ Abstract base class for "ordinary" timml elements. """ @@ -317,13 +318,24 @@ def remove_from_geopackage(self): def on_transient_changed(self, _): return + @staticmethod + def groupby(data: Dict[str, Any], key: str): + # TODO: sort by layer or time? + data = deepcopy(data) + grouper = data.pop(key) + grouped = {k: defaultdict(list) for k in grouper} + for key, values in data.items(): + for groupkey, value in zip(grouper, values): + grouped[groupkey][key].append(value) + return grouped + def to_timml(self, other=None): data = self.to_dict(self.timml_layer) errors = self.schema.validate_timml(data, other) if errors: return errors, data else: - elements = [self.process_timml_row(row) for row in data] + elements = [self.process_timml_row(row=row, other=other) for row in data] return [], elements def to_ttim(self, other=None): @@ -334,6 +346,7 @@ def aquifer_data(data, transient: bool): def interleave(a, b): return [value for pair in zip(a, b) for value in pair] + data = deepcopy(data) # Avoid side-effects hstar = data["semiconf_head"][0] semiconfined = ( data["aquitard_c"][0] is not None @@ -357,7 +370,8 @@ def interleave(a, b): z.pop(0) c.pop(0) porosity.pop(0) - s_aquitard.pop(0) + if transient: + s_aquitard.pop(0) aquifer = { "kaq": kaq, @@ -380,7 +394,7 @@ def interleave(a, b): return aquifer -class TransientElement(Element): +class TransientElement(Element, abc.ABC): """ Abstract base class for transient (ttim) elements. """ @@ -476,7 +490,7 @@ def to_ttim(self, other=None): return None, elements -class AssociatedElement(Element): +class AssociatedElement(Element, abc.ABC): """ Abstract class for elements that require associated tables such as Inhomogenities. 
@@ -520,14 +534,15 @@ def to_timml(self, other): if errors: return errors, None - other = other.copy() - other["inhomogeneity_ids"] = set([row["timeseries_id"] for row in properties]) - errors, data = super().to_timml(other) + other = other.copy() # Avoid side-effects + other["properties inhomogeneity_id"] = list(set(properties["inhomogeneity_id"])) + data = self.to_dict(self.timml_layer) + errors = self.schema.validate_timml(data, other) if errors: return errors, None grouped = self.groupby(properties, "inhomogeneity_id") - elements = [self.process_timml_row(row, grouped) for row in data] + elements = [self.process_timml_row(row=row, grouped=grouped) for row in data] return None, elements def to_ttim(self, _): diff --git a/plugin/qgistim/core/elements/head_line_sink.py b/plugin/qgistim/core/elements/head_line_sink.py index fd220df..f24b837 100644 --- a/plugin/qgistim/core/elements/head_line_sink.py +++ b/plugin/qgistim/core/elements/head_line_sink.py @@ -71,7 +71,7 @@ class HeadLineSink(TransientElement): def renderer(self) -> QgsSingleSymbolRenderer: return self.line_renderer(color=BLUE, width="0.75") - def process_timml_row(self, row): + def process_timml_row(self, row, other=None): return { "xy": self.linestring_xy(row), "hls": row["head"], diff --git a/plugin/qgistim/core/elements/headwell.py b/plugin/qgistim/core/elements/headwell.py index 24d86c7..1f1c870 100644 --- a/plugin/qgistim/core/elements/headwell.py +++ b/plugin/qgistim/core/elements/headwell.py @@ -67,7 +67,7 @@ class HeadWell(TransientElement): def renderer(self) -> QgsSingleSymbolRenderer: return self.marker_renderer(color=BLUE, size="3") - def process_timml_row(self, row) -> Dict[str, Any]: + def process_timml_row(self, row, other=None) -> Dict[str, Any]: x, y = self.point_xy(row) return { "xw": x, diff --git a/plugin/qgistim/core/elements/impermeable_line_doublet.py b/plugin/qgistim/core/elements/impermeable_line_doublet.py index a4e9ddf..b27ee87 100644 --- 
a/plugin/qgistim/core/elements/impermeable_line_doublet.py +++ b/plugin/qgistim/core/elements/impermeable_line_doublet.py @@ -29,7 +29,7 @@ class ImpermeableLineDoublet(Element): def renderer(self) -> QgsSingleSymbolRenderer: return self.line_renderer(color=RED, width="0.75") - def process_timml_row(self, row): + def process_timml_row(self, row, other=None): return { "xy": self.linestring_xy(row), "layers": row["layer"], diff --git a/plugin/qgistim/core/elements/leaky_building_pit.py b/plugin/qgistim/core/elements/leaky_building_pit.py index e69de29..b366795 100644 --- a/plugin/qgistim/core/elements/leaky_building_pit.py +++ b/plugin/qgistim/core/elements/leaky_building_pit.py @@ -0,0 +1,103 @@ +from typing import Any, Dict + +from PyQt5.QtCore import QVariant +from qgis.core import QgsDefaultValue, QgsField, QgsSingleSymbolRenderer +from qgistim.core.elements.colors import RED, TRANSPARENT_RED +from qgistim.core.elements.element import AssociatedElement, ElementSchema +from qgistim.core.schemata import ( + AllGreaterEqual, + AllRequired, + Membership, + OffsetAllRequired, + OptionalFirstOnly, + Positive, + Range, + Required, + RequiredFirstOnly, + SemiConfined, + StrictlyDecreasing, +) + + +class LeakyBuildingPitSchema(ElementSchema): + timml_schemata = { + "inhomogeneity_id": Required(Membership("properties inhomogeneity_id")), + "order": Required(Positive()), + "ndegrees": Required(Positive()), + } + + +class AssociatedLeakyBuildingPitchema(ElementSchema): + timml_schemata = { + "inhomogeneity_id": AllRequired(), + "layer": AllRequired(Range()), + "aquifer_top": AllRequired(StrictlyDecreasing()), + "aquifer_bottom": AllRequired(StrictlyDecreasing()), + "aquitard_c": OffsetAllRequired(Positive()), + "aquifer_k": AllRequired(Positive()), + "semiconf_top": OptionalFirstOnly(), + "semiconf_head": OptionalFirstOnly(), + "resistance": RequiredFirstOnly(), + } + timml_consistency_schemata = ( + SemiConfined(), + AllGreaterEqual("aquifer_top", "aquifer_bottom"), + ) + + 
+class LeakyBuildingPit(AssociatedElement): + element_type = "Leaky Building Pit" + geometry_type = "Polygon" + timml_attributes = ( + QgsField("inhomogeneity_id", QVariant.Int), + QgsField("order", QVariant.Int), + QgsField("ndegrees", QVariant.Int), + ) + assoc_attributes = [ + QgsField("inhomogeneity_id", QVariant.Int), + QgsField("layer", QVariant.Int), + QgsField("aquifer_top", QVariant.Double), + QgsField("aquifer_bottom", QVariant.Double), + QgsField("aquitard_c", QVariant.Double), + QgsField("aquifer_k", QVariant.Double), + QgsField("semiconf_top", QVariant.Double), + QgsField("semiconf_head", QVariant.Double), + QgsField("resistance", QVariant.Double), + QgsField("wall_in_layer", QVariant.Bool), + QgsField("aquitard_npor", QVariant.Double), + QgsField("aquifer_npor", QVariant.Double), + ] + timml_defaults = { + "order": QgsDefaultValue("4"), + "ndegrees": QgsDefaultValue("6"), + "inhomogeneity_id": QgsDefaultValue("1"), + } + assoc_defaults = { + "inhomogeneity_id": QgsDefaultValue("1"), + "wall_in_layer": QgsDefaultValue("false"), + } + schema = LeakyBuildingPitSchema() + assoc_schema = AssociatedLeakyBuildingPitchema() + + @property + def renderer(self) -> QgsSingleSymbolRenderer: + return self.polygon_renderer( + color=TRANSPARENT_RED, + color_border=RED, + width_border="0.75", + outline_style="dash", + ) + + def process_timml_row(self, row: Dict[str, Any], grouped: Dict[int, Any]): + inhom_id = row["inhomogeneity_id"] + raw_data = grouped[inhom_id] + aquifer_data = self.aquifer_data(raw_data, transient=False) + layers = [i for i, active in enumerate(raw_data["wall_in_layer"]) if active] + return { + "xy": self.polygon_xy(row), + "order": row["order"], + "ndeg": row["ndegrees"], + "layers": layers, + "res": raw_data["resistance"][0], + **aquifer_data, + } diff --git a/plugin/qgistim/core/elements/leaky_line_doublet.py b/plugin/qgistim/core/elements/leaky_line_doublet.py index 06adf06..b1b8259 100644 --- 
a/plugin/qgistim/core/elements/leaky_line_doublet.py +++ b/plugin/qgistim/core/elements/leaky_line_doublet.py @@ -31,7 +31,7 @@ class LeakyLineDoublet(Element): def renderer(self) -> QgsSingleSymbolRenderer: return self.line_renderer(color=RED, width="0.75", outline_style="dash") - def process_timml_row(self, row): + def process_timml_row(self, row, other=None): return { "xy": self.linestring_xy(row), "res": row["resistance"], diff --git a/plugin/qgistim/core/elements/line_sink_ditch.py b/plugin/qgistim/core/elements/line_sink_ditch.py index d3a709b..eb07d8c 100644 --- a/plugin/qgistim/core/elements/line_sink_ditch.py +++ b/plugin/qgistim/core/elements/line_sink_ditch.py @@ -71,7 +71,7 @@ class LineSinkDitch(TransientElement): def renderer(self) -> QgsSingleSymbolRenderer: return self.line_renderer(color=GREEN, width="0.75") - def process_timml_row(self, row): + def process_timml_row(self, row, other=None): return { "xy": self.linestring_xy(row), "Qls": row["discharge"], diff --git a/plugin/qgistim/core/elements/observation.py b/plugin/qgistim/core/elements/observation.py index a04c29c..bdacc11 100644 --- a/plugin/qgistim/core/elements/observation.py +++ b/plugin/qgistim/core/elements/observation.py @@ -39,7 +39,7 @@ class Observation(TransientElement): def renderer(self) -> QgsSingleSymbolRenderer: return self.marker_renderer(color=LIGHT_BLUE, name="triangle", size="3") - def process_timml_row(self, row): + def process_timml_row(self, row, other=None): x, y = self.point_xy(row) return { "x": x, diff --git a/plugin/qgistim/core/elements/polygon_area_sink.py b/plugin/qgistim/core/elements/polygon_area_sink.py index 6fe08dd..b9d823e 100644 --- a/plugin/qgistim/core/elements/polygon_area_sink.py +++ b/plugin/qgistim/core/elements/polygon_area_sink.py @@ -1,11 +1,10 @@ +from copy import deepcopy + from PyQt5.QtCore import QVariant from qgis.core import QgsDefaultValue, QgsField, QgsSingleSymbolRenderer from qgistim.core.elements.colors import GREEN, TRANSPARENT_GREEN 
from qgistim.core.elements.element import Element, ElementSchema -from qgistim.core.schemata import ( - Positive, - Required, -) +from qgistim.core.schemata import Positive, Required class PolygonAreaSinkSchema(ElementSchema): @@ -37,10 +36,15 @@ def renderer(self) -> QgsSingleSymbolRenderer: ) def process_timml_row(self, row, other): + raw_data = deepcopy(other["global_aquifer"]) + raw_data["aquitard_c"][0] = None + raw_data["semiconf_top"][0] = None + raw_data["semiconf_head"][0] = None + aquifer_data = self.aquifer_data(raw_data, transient=False) return { - "xy": self._polygon_xy(row), - "rate": row["rate"], + "xy": self.polygon_xy(row), "order": row["order"], "ndeg": row["ndegrees"], - **other["global_aquifer"], + "rate": row["rate"], + **aquifer_data, } diff --git a/plugin/qgistim/core/elements/polygon_inhomogeneity.py b/plugin/qgistim/core/elements/polygon_inhomogeneity.py index 349d560..a17b2fa 100644 --- a/plugin/qgistim/core/elements/polygon_inhomogeneity.py +++ b/plugin/qgistim/core/elements/polygon_inhomogeneity.py @@ -8,6 +8,7 @@ AllGreaterEqual, AllRequired, Membership, + NotBoth, OffsetAllRequired, OptionalFirstOnly, Positive, @@ -18,9 +19,9 @@ ) -class PolygonInhomgeneitySchema(ElementSchema): +class PolygonInhomogeneitySchema(ElementSchema): timml_schemata = { - "inhomogeneity_id": Required(Membership("inhomogeneity_id")), + "inhomogeneity_id": Required(Membership("properties inhomogeneity_id")), "order": Required(Positive()), "ndegrees": Required(Positive()), } @@ -62,8 +63,6 @@ class PolygonInhomogeneity(AssociatedElement): QgsField("semiconf_top", QVariant.Double), QgsField("semiconf_head", QVariant.Double), QgsField("rate", QVariant.Double), - QgsField("aquitard_s", QVariant.Double), - QgsField("aquifer_s", QVariant.Double), QgsField("aquitard_npor", QVariant.Double), QgsField("aquifer_npor", QVariant.Double), ] @@ -75,13 +74,7 @@ class PolygonInhomogeneity(AssociatedElement): assoc_defaults = { "inhomogeneity_id": QgsDefaultValue("1"), } - 
transient_columns = ( - "aquitard_s", - "aquifer_s", - "aquitard_npor", - "aquifer_npor", - ) - schema = PolygonInhomgeneitySchema() + schema = PolygonInhomogeneitySchema() assoc_schema = AssociatedPolygonInhomogeneitySchema() @property @@ -90,22 +83,13 @@ def renderer(self) -> QgsSingleSymbolRenderer: color=TRANSPARENT_GREY, color_border=GREY, width_border="0.75" ) - def on_transient_changed(self, transient: bool): - config = self.assoc_layer.attributeTableConfig() - columns = config.columns() - - for i, column in enumerate(columns): - if column.name in self.transient_columns: - config.setColumnHidden(i, not transient) - - self.assoc_layer.setAttributeTableConfig(config) - return - def process_timml_row(self, row: Dict[str, Any], grouped: Dict[int, Any]): inhom_id = row["inhomogeneity_id"] + raw_data = grouped[inhom_id] + aquifer_data = self.aquifer_data(raw_data, transient=False) return { - "xy": self._polygon_xy(row), + "xy": self.polygon_xy(row), "order": row["order"], "ndeg": row["ndegrees"], - **self._aquifer_data(grouped[inhom_id]), + **aquifer_data, } diff --git a/plugin/qgistim/core/elements/polygon_semi_confined_top.py b/plugin/qgistim/core/elements/polygon_semi_confined_top.py new file mode 100644 index 0000000..bd84489 --- /dev/null +++ b/plugin/qgistim/core/elements/polygon_semi_confined_top.py @@ -0,0 +1,53 @@ +from copy import deepcopy + +from PyQt5.QtCore import QVariant +from qgis.core import QgsDefaultValue, QgsField, QgsSingleSymbolRenderer +from qgistim.core.elements.colors import BLUE, TRANSPARENT_BLUE +from qgistim.core.elements.element import Element, ElementSchema +from qgistim.core.schemata import Positive, Required + + +class PolygonSemiConfinedTopSchema(ElementSchema): + timml_schemata = { + "aquitard_c": Required(Positive()), + "semiconf_top": Required(), + "semiconf_head": Required(), + "order": Required(Positive()), + "ndegrees": Required(Positive()), + } + + +class PolygonSemiConfinedTop(Element): + element_type = "Polygon 
Semi-Confined Top" + geometry_type = "Polygon" + timml_attributes = ( + QgsField("aquitard_c", QVariant.Double), + QgsField("semiconf_top", QVariant.Double), + QgsField("semiconf_head", QVariant.Double), + QgsField("order", QVariant.Int), + QgsField("ndegrees", QVariant.Int), + ) + timml_defaults = { + "order": QgsDefaultValue("4"), + "ndegrees": QgsDefaultValue("6"), + } + schema = PolygonSemiConfinedTopSchema() + + @property + def renderer(self) -> QgsSingleSymbolRenderer: + return self.polygon_renderer( + color=TRANSPARENT_BLUE, color_border=BLUE, width_border="0.75" + ) + + def process_timml_row(self, row, other): + raw_data = deepcopy(other["global_aquifer"]) + raw_data["aquitard_c"][0] = row["aquitard_c"] + raw_data["semiconf_top"][0] = row["semiconf_top"] + raw_data["semiconf_head"][0] = row["semiconf_head"] + aquifer_data = self.aquifer_data(raw_data, transient=False) + return { + "xy": self.polygon_xy(row), + "order": row["order"], + "ndeg": row["ndegrees"], + **aquifer_data, + } diff --git a/plugin/qgistim/core/elements/uniform_flow.py b/plugin/qgistim/core/elements/uniform_flow.py index c013783..11f91e5 100644 --- a/plugin/qgistim/core/elements/uniform_flow.py +++ b/plugin/qgistim/core/elements/uniform_flow.py @@ -21,6 +21,6 @@ class UniformFlow(Element): ) schema = UniformFlowSchema() - def process_timml_row(self, row): + def process_timml_row(self, row, other=None): # No renaming or changes required return row diff --git a/plugin/qgistim/core/elements/well.py b/plugin/qgistim/core/elements/well.py index 06b13a6..34e906b 100644 --- a/plugin/qgistim/core/elements/well.py +++ b/plugin/qgistim/core/elements/well.py @@ -82,7 +82,7 @@ class Well(TransientElement): def renderer(self) -> QgsSingleSymbolRenderer: return self.marker_renderer(color=GREEN, size="3") - def process_timml_row(self, row) -> Dict[str, Any]: + def process_timml_row(self, row, other=None) -> Dict[str, Any]: x, y = self.point_xy(row) return { "xw": x, diff --git 
a/plugin/qgistim/core/formatting.py b/plugin/qgistim/core/formatting.py index a0f0b80..16d0252 100644 --- a/plugin/qgistim/core/formatting.py +++ b/plugin/qgistim/core/formatting.py @@ -154,7 +154,7 @@ def to_script_string(data: Dict[str, Any]) -> str: return "\n".join(strings) -def to_json(data: Dict[str, Any]) -> Dict[str, Any]: +def to_json(data: Dict[str, Any], cellsize: float) -> Dict[str, Any]: """ Take the data and add: @@ -176,7 +176,7 @@ def to_json(data: Dict[str, Any]) -> Dict[str, Any]: json_data = { "ModelMaq": aquifer_data, - "headgrid": headgrid_entry(domain_data, domain_data["cellsize"]), + "headgrid": headgrid_entry(domain_data, cellsize), } for layername, element_data in data.items(): diff --git a/plugin/qgistim/core/schemata.py b/plugin/qgistim/core/schemata.py index 5ff6a81..594f008 100644 --- a/plugin/qgistim/core/schemata.py +++ b/plugin/qgistim/core/schemata.py @@ -99,7 +99,7 @@ def _validate_schemata(self, data, other=None) -> ErrorList: class OptionalFirstOnly(SchemaContainer): - """Exclusively the first value must be provided.""" + """Exclusively the first value may be provided.""" def validate(self, data, other=None) -> ErrorList: if any(v is not None for v in data[1:]): @@ -110,6 +110,18 @@ def validate(self, data, other=None) -> ErrorList: return self._validate_schemata(data[0], other) +class RequiredFirstOnly(SchemaContainer): + """Exclusively the first value must be provided.""" + + def validate(self, data, other=None) -> ErrorList: + if data[0] is None: + return ["The first value must be filled in."] + elif any(v is not None for v in data[1:]): + return ["Only the first value may be filled in."] + else: + return self._validate_schemata(data[0], other) + + class Optional(SchemaContainer): def validate(self, data, other=None) -> ErrorList: if data is None: @@ -185,6 +197,8 @@ def __init__(self, x: str, y: str): self.y = y def validate(self, data, _=None) -> MaybeError: + print(data[self.x]) + print(data[self.y]) if (data[self.x] is not 
None) and (data[self.y] is not None): return f"Either {self.x} or {self.y} should be provided, not both." return None @@ -304,6 +318,9 @@ def validate(self, data, _=None) -> MaybeError: semitop = data["semiconf_top"][0] if semitop is not None and semitop <= data["aquifer_top"][0]: return "semiconf_top must be greater than first aquifer_top." + if "rate" in data: + if data["rate"][0] is not None and semitop: + return "A rate cannot be given when a semi-confined is enabled." return None diff --git a/plugin/qgistim/widgets/compute_widget.py b/plugin/qgistim/widgets/compute_widget.py index 1e45a8f..f6bab78 100644 --- a/plugin/qgistim/widgets/compute_widget.py +++ b/plugin/qgistim/widgets/compute_widget.py @@ -239,18 +239,17 @@ def compute(self) -> None: Run a TimML computation with the current state of the currently active GeoPackage dataset. """ - active_elements = self.parent.active_elements() cellsize = self.cellsize_spin_box.value() - inpath = Path(self.parent.path).absolute() - outpath = Path(self.output_line_edit.text()).absolute() - mode = self.transient_combo_box.currentText().lower() + transient = self.transient_combo_box.currentText().lower() + is_transient = transient == "transient" + path = Path(self.output_line_edit.text()).absolute().with_suffix(".json") + self.parent.dataset_widget.convert_to_json( + path, cellsize=cellsize, transient=is_transient + ) data = { "operation": "compute", - "inpath": str(inpath), - "outpath": str(outpath), - "cellsize": cellsize, - "mode": mode, - "active_elements": active_elements, + "path": str(path), + "mode": transient, } # import json # print(json.dumps(data)) @@ -264,7 +263,8 @@ def compute(self) -> None: # task.finished(result) # Remove the output layers from QGIS, otherwise they cannot be overwritten. 
- gpkg_path = str(outpath) + return + gpkg_path = str(path) for layer in QgsProject.instance().mapLayers().values(): if Path(gpkg_path) == Path(layer.source()): QgsProject.instance().removeMapLayer(layer.id()) diff --git a/plugin/qgistim/widgets/dataset_widget.py b/plugin/qgistim/widgets/dataset_widget.py index b1ec38e..6a0e505 100644 --- a/plugin/qgistim/widgets/dataset_widget.py +++ b/plugin/qgistim/widgets/dataset_widget.py @@ -31,9 +31,9 @@ QVBoxLayout, QWidget, ) -from qgis.core import QgsApplication, QgsProject, QgsTask +from qgis.core import Qgis, QgsProject from qgistim.core.elements import Aquifer, Domain, load_elements_from_geopackage -from qgistim.core.task import BaseServerTask +from qgistim.core.formatting import to_json, to_script_string from qgistim.widgets.error_window import ValidationDialog SUPPORTED_TTIM_ELEMENTS = set( @@ -54,12 +54,6 @@ ) -class ConvertTask(BaseServerTask): - @property - def task_description(self): - return "converting GeoPackage to Python script" - - class DatasetTreeWidget(QTreeWidget): def __init__(self, parent=None): super().__init__(parent) @@ -212,20 +206,30 @@ def convert_to_timml(self): name = "timml Aquifer:Aquifer" aquifer = elements.pop(name) _errors, raw_data = aquifer.to_timml() - other = {"aquifer layers": raw_data["layer"]} + other = {"aquifer layers": raw_data["layer"], "global_aquifer": raw_data} if _errors: errors[name] = _errors else: aquifer_data = aquifer.aquifer_data(raw_data, transient=False) data[name] = aquifer_data - other["global_aquifer"] = aquifer_data for name, element in elements.items(): - _errors, _data = element.to_timml(other) - if _errors: - errors[name] = _errors - else: - data[name] = _data + try: + _errors, _data = element.to_timml(other) + if _errors: + errors[name] = _errors + elif _data: # skip empty tables + data[name] = _data + except RuntimeError as e: + if ( + e.args[0] + == "wrapped C/C++ object of type QgsVectorLayer has been deleted" + ): + # Delay of Qt garbage collection to 
blame? + pass + else: + raise e + return errors, data @@ -235,7 +239,6 @@ def __init__(self, parent): self.parent = parent self.dataset_tree = DatasetTreeWidget() self.start_task = None - self.convert_task = None self.dataset_tree.setSizePolicy(QSizePolicy.Preferred, QSizePolicy.Expanding) self.dataset_line_edit = QLineEdit() self.dataset_line_edit.setEnabled(False) # Just used as a viewing port @@ -252,7 +255,7 @@ def __init__(self, parent): self.remove_button.clicked.connect(self.remove_geopackage_layer) self.add_button.clicked.connect(self.add_selection_to_qgis) self.convert_button = QPushButton("Convert GeoPackage to Python script") - self.convert_button.clicked.connect(self.convert_to_json) + self.convert_button.clicked.connect(self.convert_to_python) # Layout dataset_layout = QVBoxLayout() dataset_row = QHBoxLayout() @@ -279,7 +282,6 @@ def path(self) -> str: def reset(self): # Set state back to defaults self.start_task = None - self.convert_task = None self.dataset_line_edit.setText("") self.dataset_tree.reset() return @@ -335,8 +337,6 @@ def load_geopackage(self) -> None: self.parent.on_transient_changed() return - return - def new_geopackage(self) -> None: """ Create a new GeoPackage file, and set it as the active dataset. 
@@ -446,52 +446,52 @@ def set_interpreter_interaction(self, value: bool): self.parent.set_interpreter_interaction(value) return - def convert(self) -> None: - outpath, _ = QFileDialog.getSaveFileName(self, "Select file", "", "*.py") - if outpath == "": # Empty string in case of cancel button press - return - - data = { - "operation": "convert", - "inpath": self.path, - "outpath": outpath, - } - self.convert_task = ConvertTask(self, data, self.parent.message_bar) - self.start_task = self.parent.start_interpreter_task() - if self.start_task is not None: - self.convert_task.addSubTask( - self.start_task, [], QgsTask.ParentDependsOnSubTask - ) - - self.parent.set_interpreter_interaction(False) - QgsApplication.taskManager().addTask(self.convert_task) - return - - def convert_to_json(self) -> None: + def _convert(self, transient: bool) -> None: if self.validation_dialog: self.validation_dialog.close() self.validation_dialog = None - outpath, _ = QFileDialog.getSaveFileName(self, "Select file", "", "*.py") - if outpath == "": # Empty string in case of cancel button press - return - errors, data = self.dataset_tree.convert_to_timml() if errors: self.validation_dialog = ValidationDialog(errors) + return None + + return data + + def convert_to_python(self, transient: bool) -> None: + outpath, _ = QFileDialog.getSaveFileName(self, "Select file", "", "*.py") + if outpath == "": # Empty string in case of cancel button press + return + data = self._convert(transient=transient) + if data is None: return - # TODO - data["timml Domain:Domain"][ - "cellsize" - ] = self.parent.compute_widget.cellsize_spin_box.value() + script = to_script_string(data) + with open(outpath, "w") as f: + f.write(script) - from qgistim.core.formatting import to_json, to_script_string + self.parent.message_bar.pushMessage( + title="Info", + text=f"Converted geopackage to Python script: {outpath}", + level=Qgis.Info, + ) + return - to_python = to_script_string(data) - print(to_python) + def 
convert_to_json(self, path: str, cellsize: float, transient: bool) -> None: + data = self._convert(transient=transient) + if data is None: + return - json_data = to_json(data) - json_string = json.dumps(json_data, indent=4) - print(json_string) + json_data = to_json(data, cellsize=cellsize) + # Collect EPSG code. + # TODO: use toWkt() instead? + json_data["crs"] = self.parent.crs.authid() + with open(path, "w") as fp: + json.dump(json_data, fp=fp, indent=4) + + self.parent.message_bar.pushMessage( + title="Info", + text=f"Converted geopackage to JSON: {path}", + level=Qgis.Info, + ) return diff --git a/plugin/qgistim/widgets/elements_widget.py b/plugin/qgistim/widgets/elements_widget.py index ba24317..c60c73a 100644 --- a/plugin/qgistim/widgets/elements_widget.py +++ b/plugin/qgistim/widgets/elements_widget.py @@ -1,6 +1,7 @@ from functools import partial from PyQt5.QtWidgets import QGridLayout, QPushButton, QVBoxLayout, QWidget +from qgis.core import Qgis from qgistim.core.elements import ELEMENTS @@ -60,7 +61,13 @@ def tim_element(self, element_type: str) -> None: except ValueError: return - element = klass.dialog(self.parent.path, crs, self.parent.iface, names) + try: + element = klass.dialog(self.parent.path, crs, self.parent.iface, names) + except ValueError as e: + msg = str(e) + self.parent.message_bar.pushMessage("Error", msg, level=Qgis.Critical) + return + if element is None: # dialog cancelled return # Write to geopackage From 99bbf58d4e6401a43d833d20e91a1fc3d902b5b5 Mon Sep 17 00:00:00 2001 From: Huite Bootsma Date: Tue, 26 Sep 2023 22:56:23 +0200 Subject: [PATCH 12/41] WIP * More CRS info * Avoid rioxarray * Start with non-GDAL geopackage * Run TimML model from JSON * Transient as bool --- gistim/__init__.py | 4 - gistim/__main__.py | 50 +-- gistim/compute.py | 126 +++--- gistim/geopackage.py | 140 +++++++ gistim/timml_elements.py | 474 ----------------------- plugin/qgistim/widgets/compute_widget.py | 8 +- plugin/qgistim/widgets/dataset_widget.py | 11 
+- 7 files changed, 233 insertions(+), 580 deletions(-) create mode 100644 gistim/geopackage.py delete mode 100644 gistim/timml_elements.py diff --git a/gistim/__init__.py b/gistim/__init__.py index 5bc16c3..8647a4c 100644 --- a/gistim/__init__.py +++ b/gistim/__init__.py @@ -1,9 +1,5 @@ -import pathlib -from typing import Dict, Union - import pkg_resources -from gistim import timml_elements, ttim_elements from gistim.compute import compute_steady, compute_transient from gistim.data_extraction import as_aquifer_aquitard, layer_statistics from gistim.ugrid import to_ugrid2d diff --git a/gistim/__main__.py b/gistim/__main__.py index a52f2c5..96297e7 100644 --- a/gistim/__main__.py +++ b/gistim/__main__.py @@ -81,21 +81,12 @@ def handle(line) -> None: # print(json.dumps(data, indent=4)) operation = data.pop("operation") - if operation == "compute": - gistim.compute( - inpath=data["inpath"], - outpath=data["outpath"], - cellsize=data["cellsize"], + gistim.compute.compute( + path=data["path"], mode=data["mode"], - active_elements=data["active_elements"], ) - response = "Computation of {inpath} to {outpath}".format(**data) - elif operation == "convert": - inpath = data["inpath"] - outpath = data["outpath"] - gistim.convert_to_script(inpath, outpath) - response = "Conversion of {inpath} to {outpath}".format(**data) + response = "Computation of {path}".format(**data) elif operation == "extract": inpath = data["inpath"] outpath = data["outpath"] @@ -148,33 +139,8 @@ def extract(args) -> None: gistim.data_extraction.netcdf_to_table(inpath, outpath, wkt_geometry) -def convert(args) -> None: - """ - Convert a Geopackage into a Python script. 
- """ - inpath = args.inpath[0] - outpath = args.outpath[0] - gistim.convert_to_script(inpath, outpath) - - def compute(args) -> None: - jsonpath = args.jsonpath[0] - - with open(jsonpath, "r") as f: - data = json.loads(f.read()) - - if data["mode"] == "steady-state": - gistim.compute_steady( - inpath=data["inpath"], - outpath=data["outpath"], - ) - elif data["mode"] == "transient": - gistim.compute_transient( - inpath=data["inpath"], - outpath=data["outpath"], - ) - else: - raise ValueError("Mode should be transient or steady-state.") + gistim.compute.compute(path=args.path[0], transient=args.transient[0]) return @@ -197,12 +163,10 @@ def compute(args) -> None: parser_extract.add_argument("outpath", type=str, nargs=1, help="outpath") parser_extract.add_argument("wkt", type=str, nargs=1, help="wkt") - parser_convert.set_defaults(func=convert) - parser_convert.add_argument("inpath", type=str, nargs=1, help="inpath") - parser_convert.add_argument("outpath", type=str, nargs=1, help="outpath") - parser_compute.set_defaults(func=compute) - parser_compute.add_argument("jsonpath", type=str, nargs=1, help="jsonpath") + parser_compute.add_argument("path", type=str, nargs=1, help="path to JSON file") + parser_compute.add_argument("--transient", action=argparse.BooleanOptionalAction) + parser.set_defaults(transient=False) # Parse and call the appropriate function args = parser.parse_args() diff --git a/gistim/compute.py b/gistim/compute.py index 3cae047..218df67 100644 --- a/gistim/compute.py +++ b/gistim/compute.py @@ -1,15 +1,16 @@ import json import pathlib +from collections import defaultdict from typing import Dict, Union -import geopandas as gpd import numpy as np -import rioxarray # noqa # pylint: disable=unused-import +import pandas as pd import timml import tqdm import ttim import xarray as xr +from gistim.geopackage import CoordinateReferenceSystem, write_geopackage from gistim.ugrid import to_ugrid2d TIMML_MAPPING = { @@ -30,30 +31,45 @@ def write_raster( head: 
xr.DataArray, - crs: int, + crs: CoordinateReferenceSystem, outpath: Union[pathlib.Path, str], ) -> None: - out = head.rio.write_crs(crs).rio.write_coordinate_system() - out.to_netcdf(outpath.with_suffix(".nc")) + # Write GDAL required metadata. + head["spatial_ref"] = xr.DataArray( + np.int32(0), attrs={"crs_wkt": crs.wkt, "spatial_ref": crs.wkt} + ) + head.attrs["grid_mapping"] = "spatial_ref" + head["x"].attrs = { + "axis": "X", + "long_name": "x coordinate of projection", + "standard_name": "projection_x_coordinate", + } + head["y"].attrs = { + "axis": "Y", + "long_name": "y coordinate of projection", + "standard_name": "projection_y_coordinate", + } + head.to_netcdf(outpath.with_suffix(".nc"), format="NETCDF3_CLASSIC") return def write_ugrid( head: xr.DataArray, - crs: int, + crs: CoordinateReferenceSystem, outpath: Union[pathlib.Path, str], ) -> None: ugrid_head = to_ugrid2d(head) + # Write MDAL required metadata. ugrid_head["projected_coordinate_system"] = xr.DataArray( data=np.int32(0), - attrs={"epsg": np.int32(crs.to_epsg())}, + attrs={"epsg": np.int32(crs.srs_id)}, ) - ugrid_head.to_netcdf(outpath.with_suffix(".ugrid.nc")) + ugrid_head.to_netcdf(outpath.with_suffix(".ugrid.nc"), format="NETCDF3_CLASSIC") return def write_vector( - gdf_head: gpd.GeoDataFrame, + gdf_head, crs: int, outpath: Union[pathlib.Path, str], layername: str, @@ -71,35 +87,29 @@ def write_vector( def initialize_timml(data): aquifer = data.pop("ModelMaq") timml_model = timml.ModelMaq(**aquifer) - for elementtype, entry in data.items(): - klass = TIMML_MAPPING[elementtype] + elements = defaultdict(list) + for name, entry in data.items(): + klass = TIMML_MAPPING[entry["type"]] for kwargs in entry["data"]: element = klass(model=timml_model, **kwargs) - return timml_model - - -def timml_head_observations(model: timml.Model, observations: Dict) -> gpd.GeoDataFrame: - geodataframes = {} - for name, kwargs_collection in observations.items(): - heads = [] - xx = [] - yy = [] - labels = [] - 
for kwargs in kwargs_collection: - x = kwargs["x"] - y = kwargs["y"] - heads.append(model.head(x=x, y=y)) - xx.append(x) - yy.append(y) - labels.append(kwargs["label"]) - - d = {"geometry": gpd.points_from_xy(xx, yy), "label": labels} - for i, layerhead in enumerate(np.vstack(heads).T): - d[f"head_layer{i}"] = layerhead - - geodataframes[name] = gpd.GeoDataFrame(d) - - return geodataframes + elements[name].append(element) + return timml_model, elements + + +def timml_head_observations( + model: timml.Model, observations: Dict +) -> Dict[str, pd.DataFrame]: + d = {"geometry": [], "labels": []} + heads = [] + for kwargs in observations: + x = kwargs["x"] + y = kwargs["y"] + heads.append(model.head(x=x, y=y)) + d["geometry"].append({"type": "point", "coordinates": [x, y]}) + d["labels"].append(kwargs["label"]) + for i, layerhead in enumerate(np.vstack(heads).T): + d[f"head_layer{i}"] = layerhead + return pd.DataFrame(d) def timml_headgrid(model: timml.Model, xmin, xmax, ymin, ymax, spacing) -> xr.DataArray: @@ -137,30 +147,33 @@ def timml_headgrid(model: timml.Model, xmin, xmax, ymin, ymax, spacing) -> xr.Da def compute_steady( - inpath: Union[pathlib.Path, str], - outpath: Union[pathlib.Path, str], + path: Union[pathlib.Path, str], ) -> None: - inpath = pathlib.Path(inpath) - outpath = pathlib.Path(outpath) - - with open(inpath, "r") as f: + with open(path, "r") as f: data = json.load(f) - crs = data.pop("crs") - observations = data.pop("Observations", None) + crs = CoordinateReferenceSystem(**data.pop("crs")) + observation_names = [ + key for key, value in data.items() if value.get("type") == "Observation" + ] + observations = {name: data.pop(name) for name in observation_names} headgrid = data.pop("headgrid") - timml_model = initialize_timml(data) + timml_model, elements = initialize_timml(data) timml_model.solve() - gdfs_obs = timml_head_observations(timml_model, observations) + # Compute gridded head data and write to netCDF. 
head = timml_headgrid(timml_model, **headgrid) + write_raster(head, crs, path) + write_ugrid(head, crs, path) - write_raster(head, crs, outpath) - write_ugrid(head, crs, outpath) - - for name, gdf in gdfs_obs.items(): - write_vector(gdf, crs, outpath, layername=name) + # Compute observations and discharge, and write to geopackage. + tables = {} + if observations: + for layername, content in observations.items(): + tables[layername] = timml_head_observations(timml_model, content["data"]) + # TODO: Compute discharges... + write_geopackage(tables, crs, path) return @@ -207,3 +220,14 @@ def compute_transient( write_vector(gdf, crs, outpath, layername=name) return + + +def compute(path: str, transient: bool): + path = pathlib.Path(path) + if not path.exists(): + raise FileExistsError(path) + if transient: + compute_transient(path) + else: + compute_steady(path) + return diff --git a/gistim/geopackage.py b/gistim/geopackage.py new file mode 100644 index 0000000..66b3a64 --- /dev/null +++ b/gistim/geopackage.py @@ -0,0 +1,140 @@ +""" +Utilities to write data to a geopackage. +""" +import itertools +import sqlite3 +from pathlib import Path +from typing import Dict, List, NamedTuple + +import geomet +import pandas as pd + + +class CoordinateReferenceSystem(NamedTuple): + description: str + organization: str + srs_id: int + wkt: str + + +class BoundingBox(NamedTuple): + xmin: float + ymin: float + xmax: float + ymax: float + + +WGS84_WKT = 'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AXIS["Latitude",NORTH],AXIS["Longitude",EAST],AUTHORITY["EPSG","4326"]]' +APPLICATION_ID = 1196444487 +USER_VERSION = 10200 + + +def create_gpkg_contents( + table_names: List[str], bounding_boxes: List[BoundingBox], srs_id: int +) -> pd.DataFrame: + # From records? 
+ return pd.DataFrame( + data={ + "table_name": table_names, + "data_type": ["features"], + "identifier": table_names, + "description": "", + "last_change": pd.Timestamp.now(), + "min_x": [bb.xmin for bb in bounding_boxes], + "min_y": [bb.ymin for bb in bounding_boxes], + "max_x": [bb.xmax for bb in bounding_boxes], + "max_y": [bb.ymax for bb in bounding_boxes], + "srs_id": srs_id, + } + ) + + +def create_gkpg_spatial_ref_sys(crs: CoordinateReferenceSystem) -> pd.DataFrame: + return pd.DataFrame( + data={ + "srs_name": [ + "Undefined Cartesian SRS", + "Undefined geographic SRS", + "WGS 84 geodetic", + crs.description, + ], + "srs_id": [-1, 0, 4326, crs.srs_id], + "organization": ["NONE", "NONE", "EPSG", "EPSG"], + "organization_coordsys_id": [-1, 0, 4326, crs.organization], + "definition": ["undefined", "undefined", WGS84_WKT, crs.wkt], + "description": [ + "undefined Cartesian coordinate reference system", + "undefined geographic coordinate reference system", + "longitude/latitude coordinates in decimal degrees on the WGS 84 spheroid", + "", + ], + } + ) + + +def create_gpkg_geometry_columns( + table_names: List[str], + geometry_type_names: List[str], + srs_id: int, +) -> pd.DataFrame: + return pd.DataFrame( + data={ + "table_name": table_names, + "column_name": "geom", + "geometry_type_name": geometry_type_names, + "srs_id": srs_id, + "z": 0, + "m": 0, + } + ) + + +def points_bounding_box(points) -> BoundingBox: + x = [point[0] for point in points] + y = [point[1] for point in points] + return BoundingBox(xmin=min(x), ymin=min(y), xmax=max(x), ymax=max(y)) + + +def lines_bounding_box(lines) -> BoundingBox: + x, y = zip(*itertools.chain.from_iterable(line["coordinates"] for line in lines)) + return BoundingBox(xmin=min(x), ymin=min(y), xmax=max(x), ymax=max(y)) + + +def write_geopackage( + tables: Dict[str, pd.DataFrame], crs: CoordinateReferenceSystem, path: Path +) -> None: + try: + connection = sqlite3.connect(database=path.with_suffix(".output.gpkg")) + 
connection.execute(f"PRAGMA application_id = {APPLICATION_ID};") + connection.execute(f"PRAGMA user_version = {USER_VERSION};") + + table_names = [] + geometry_types = [] + bounding_boxes = [] + for layername, layerdata in tables.items(): + # TODO: + # * gather bounding boxes + # * gather geometry types + # * convert to geopackage WKB using geomet + table_names.append(layername) + + # Create mandatory geopackage tables. + gpkg_contents = create_gpkg_contents( + table_names=table_names, bounding_boxes=bounding_boxes, srs_id=crs.srs_id + ) + gpkg_geometry_columns = create_gpkg_geometry_columns( + table_names=table_names, + geometry_type_names=geometry_types, + srs_id=crs.srs_id, + ) + gpkg_spatial_ref_sys = create_gkpg_spatial_ref_sys(crs) + # Write to Geopackage database. + gpkg_contents.to_sql(name="gpkg_contents", con=connection) + gpkg_geometry_columns.to_sql(name="gpkg_geometry_columns", con=connection) + gpkg_spatial_ref_sys.to_sql(name="gpkg_spatial_ref_sys", con=connection) + + finally: + connection.commit() + connection.close() + + return diff --git a/gistim/timml_elements.py b/gistim/timml_elements.py deleted file mode 100644 index 9b910ae..0000000 --- a/gistim/timml_elements.py +++ /dev/null @@ -1,474 +0,0 @@ -""" -This model converts the geodataframe as read from the geopackage into keyword -arguments for TimML. - -These keyword arguments are used to initialize a model, or used to generate a -Python script. 
-""" -from collections import defaultdict -from typing import Any, Dict, List, Tuple - -import black -import geopandas as gpd -import numpy as np -import timml -import tqdm -import xarray as xr - -from gistim.common import ( - ElementSpecification, - TimmlModelSpecification, - aquifer_data, - dict_to_kwargs_code, - filter_nan, - headgrid_code, - linestring_coordinates, - point_coordinates, - polygon_coordinates, - sanitized, -) - - -def constant(spec: ElementSpecification) -> List[Dict[str, Any]]: - firstrow = filter_nan(spec.dataframe.iloc[0]) - x, y = point_coordinates(firstrow) - return [ - { - "xr": x, - "yr": y, - "hr": firstrow["head"], - "layer": firstrow["layer"], - "label": firstrow["label"], - } - ] - - -def uflow(spec: ElementSpecification) -> List[Dict[str, Any]]: - row = filter_nan(spec.dataframe.iloc[0]) - return [ - { - "slope": row["slope"], - "angle": row["angle"], - "label": row["label"], - } - ] - - -def observation(spec: ElementSpecification) -> List[Dict[str, Any]]: - dataframe = spec.dataframe - X, Y = point_coordinates(dataframe) - kwargslist = [] - kwargslist = [] - for row, x, y in zip(dataframe.to_dict("records"), X, Y): - row = filter_nan(row) - kwargslist.append( - { - "x": x, - "y": y, - "label": row["label"], - } - ) - return kwargslist - - -def well(spec: ElementSpecification) -> List[Dict[str, Any]]: - dataframe = spec.dataframe - X, Y = point_coordinates(dataframe) - kwargslist = [] - for row, x, y in zip(dataframe.to_dict("records"), X, Y): - row = filter_nan(row) - kwargslist.append( - { - "xw": x, - "yw": y, - "Qw": row["discharge"], - "rw": row["radius"], - "res": row["resistance"], - "layers": row["layer"], - "label": row["label"], - } - ) - return kwargslist - - -def headwell(spec: ElementSpecification) -> List[Dict[str, Any]]: - dataframe = spec.dataframe - X, Y = point_coordinates(dataframe) - kwargslist = [] - for row, x, y in zip(dataframe.to_dict("records"), X, Y): - row = filter_nan(row) - kwargslist.append( - { - "xw": 
x, - "yw": y, - "hw": row["head"], - "rw": row["radius"], - "res": row["resistance"], - "layers": row["layer"], - "label": row["label"], - } - ) - return kwargslist - - -def polygoninhom(spec: ElementSpecification) -> List[Dict[str, Any]]: - """ - Parameters - ---------- - dataframe: tuple of geopandas.GeoDataFrame - - Returns - ------- - None - """ - geometry = spec.dataframe - properties = spec.associated_dataframe.set_index("inhomogeneity_id") - # Iterate through the row containing the geometry - # and iterate through the associated table containing k properties. - kwarglist = [] - for row in geometry.to_dict("records"): - row = filter_nan(row) - dataframe = properties.loc[[row["inhomogeneity_id"]]] - kwargs = aquifer_data(dataframe) - kwargs["xy"] = polygon_coordinates(row) - kwargs["order"] = row["order"] - kwargs["ndeg"] = row["ndegrees"] - kwarglist.append(kwargs) - return kwarglist - - -def polygonareasink(spec: ElementSpecification) -> List[Dict[str, Any]]: - geometry = spec.dataframe - properties = spec.associated_dataframe.sort_values(by="layer").set_index("layer") - # Ensure there's no semiconfined top - dataframe = properties.copy() - dataframe.loc[0, "semiconf_head"] = np.nan - kwarglist = [] - for row in geometry.to_dict("records"): - row = filter_nan(row) - kwargs = aquifer_data(dataframe.reset_index()) - kwargs["N"] = row["rate"] - kwargs["xy"] = polygon_coordinates(row) - kwargs["order"] = row["order"] - kwargs["ndeg"] = row["ndegrees"] - kwarglist.append(kwargs) - return kwarglist - - -def polygontop(spec: ElementSpecification) -> List[Dict[str, Any]]: - geometry = spec.dataframe - properties = spec.associated_dataframe.sort_values(by="layer").set_index("layer") - kwarglist = [] - for row in geometry.to_dict("records"): - row = filter_nan(row) - dataframe = properties.copy() - dataframe.loc[0, "aquitard_c"] = row["aquitard_c"] - dataframe.loc[0, "semiconf_top"] = row["semiconf_top"] - dataframe.loc[0, "semiconf_head"] = row["semiconf_head"] - 
kwargs = aquifer_data(dataframe.reset_index()) - kwargs["xy"] = polygon_coordinates(row) - kwargs["order"] = row["order"] - kwargs["ndeg"] = row["ndegrees"] - kwarglist.append(kwargs) - return kwarglist - - -def buildingpit(spec: ElementSpecification) -> List[Dict[str, Any]]: - geometry = spec.dataframe - properties = spec.associated_dataframe.set_index("inhomogeneity_id") - # Iterate through the row containing the geometry - # and iterate through the associated table containing k properties. - kwarglist = [] - for row in geometry.to_dict("records"): - row = filter_nan(row) - dataframe = properties.loc[row["inhomogeneity_id"]] - kwargs = aquifer_data(dataframe) - kwargs["xy"] = polygon_coordinates(row) - kwargs["order"] = row["order"] - kwargs["ndeg"] = row["ndegrees"] - kwargs["layers"] = np.atleast_1d(row["layer"]) - kwarglist.append(kwargs) - return kwarglist - - -def headlinesink(spec: ElementSpecification) -> List[Dict[str, Any]]: - kwargslist = [] - for row in spec.dataframe.to_dict("records"): - row = filter_nan(row) - kwargslist.append( - { - "xy": linestring_coordinates(row), - "hls": row["head"], - "res": row["resistance"], - "wh": row["width"], - "order": row["order"], - "layers": row["layer"], - "label": row["label"], - } - ) - return kwargslist - - -def linesinkditch(spec: ElementSpecification) -> List[Dict[str, Any]]: - kwargslist = [] - for row in spec.dataframe.to_dict("records"): - row = filter_nan(row) - kwargslist.append( - { - "xy": linestring_coordinates(row), - "Qls": row["discharge"], - "res": row["resistance"], - "wh": row["width"], - "order": row["order"], - "layers": row["layer"], - "label": row["label"], - } - ) - return kwargslist - - -def leakylinedoublet(spec: ElementSpecification) -> List[Dict[str, Any]]: - kwargslist = [] - for row in spec.dataframe.to_dict("records"): - row = filter_nan(row) - kwargslist.append( - { - "xy": linestring_coordinates(row), - "res": row["resistance"], - "layers": row["layer"], - "order": row["order"], - 
"label": row["label"], - } - ) - return kwargslist - - -def implinedoublet(spec: ElementSpecification) -> List[Dict[str, Any]]: - kwargslist = [] - for row in spec.dataframe.to_dict("records"): - row = filter_nan(row) - kwargslist.append( - { - "xy": linestring_coordinates(row), - "layers": row["layer"], - "order": row["order"], - "label": row["label"], - } - ) - return kwargslist - - -def circareasink(spec: ElementSpecification) -> List[Dict[str, Any]]: - kwargslist = [] - for row in spec.dataframe.to_dict("records"): - row = filter_nan(row) - xc, yc = np.array(row["geometry"].centroid.coords)[0] - coords = np.array(row["geometry"].exterior.coords) - x, y = coords.T - # Use squared radii - radii2 = (x - xc) ** 2 + (y - yc) ** 2 - radius2 = radii2[0] - # Check whether geometry is close enough to a circle. - # Accept 1% deviation. - tolerance = 0.01 * radius2 - if not np.allclose(radii2, radius2, atol=tolerance): - raise ValueError("Circular Area Sink geometry is not circular") - - radius = np.sqrt(radius2) - kwargslist.append( - { - "xc": xc, - "yc": yc, - "R": radius, - "N": row["rate"], - } - ) - return kwargslist - - -def validate(spec: TimmlModelSpecification) -> None: - if spec.aquifer is None: - raise ValueError("Aquifer entry is missing") - # TODO: more checks - - -def head_observations(model: timml.Model, observations: Dict) -> gpd.GeoDataFrame: - geodataframes = {} - for name, kwargs_collection in observations.items(): - heads = [] - xx = [] - yy = [] - labels = [] - for kwargs in kwargs_collection: - x = kwargs["x"] - y = kwargs["y"] - heads.append(model.head(x=x, y=y)) - xx.append(x) - yy.append(y) - labels.append(kwargs["label"]) - - d = {"geometry": gpd.points_from_xy(xx, yy), "label": labels} - for i, layerhead in enumerate(np.vstack(heads).T): - d[f"head_layer{i}"] = layerhead - - geodataframes[name] = gpd.GeoDataFrame(d) - - return geodataframes - - -def headgrid(model: timml.Model, extent: Tuple[float], cellsize: float) -> xr.DataArray: - """ - 
Compute the headgrid of the TimML model, and store the results - in an xarray DataArray with the appropriate dimensions. - - Parameters - ---------- - model: timml.Model - Solved model to get heads from - extent: Tuple[float] - xmin, xmax, ymin, ymax - cellsize: float - Desired cell size of the output head grids - - Returns - ------- - head: xr.DataArray - DataArray with dimensions ``("layer", "y", "x")``. - """ - xmin, xmax, ymin, ymax = extent - x = np.arange(xmin, xmax, cellsize) + 0.5 * cellsize - # In geospatial rasters, y is DECREASING with row number - y = np.arange(ymax, ymin, -cellsize) - 0.5 * cellsize - nlayer = model.aq.find_aquifer_data(x[0], y[0]).naq - layer = [i for i in range(nlayer)] - head = np.empty((nlayer, y.size, x.size), dtype=np.float64) - for i in tqdm.tqdm(range(y.size)): - for j in range(x.size): - head[:, i, j] = model.head(x[j], y[i], layer) - - return xr.DataArray( - data=head, - name="head", - coords={"layer": layer, "y": y, "x": x}, - dims=("layer", "y", "x"), - ) - - -# Map the names of the elements to their constructors -MAPPING = { - "Constant": (constant, timml.Constant), - "Uniform Flow": (uflow, timml.Uflow), - "Circular Area Sink": (circareasink, timml.CircAreaSink), - "Well": (well, timml.Well), - "Head Well": (headwell, timml.HeadWell), - "Polygon Inhomogeneity": (polygoninhom, timml.PolygonInhomMaq), - "Polygon Area Sink": (polygonareasink, timml.PolygonInhomMaq), - "Polygon Semi-Confined Top": (polygontop, timml.PolygonInhomMaq), - "Head Line Sink": (headlinesink, timml.HeadLineSinkString), - "Line Sink Ditch": (linesinkditch, timml.LineSinkDitchString), - "Leaky Line Doublet": (leakylinedoublet, timml.LeakyLineDoubletString), - "Impermeable Line Doublet": (implinedoublet, timml.ImpLineDoubletString), - "Building Pit": (buildingpit, timml.BuildingPit), - "Leaky Building Pit": (buildingpit, timml.LeakyBuildingPit), - "Observation": (observation, None), -} - - -def initialize_model(spec: TimmlModelSpecification) -> 
timml.Model: - """ - Initialize a TimML analytic model based on the data in a geopackage. - - Parameters - ---------- - spec: ModelSpecification - Named tuple with the layer name of the aquifer and a dictionary of - other element names to its construction function. - - Returns - ------- - timml.Model - elements: Dict of number name to TimML element - observations: Dict of list - - Examples - -------- - - >>> import gistim - >>> path = "my-model.gpkg" - >>> spec = gistim.model_specification(path) - >>> model = gistim.initialize_model(spec) - >>> model.solve() - - """ - validate(spec) - model = timml.ModelMaq(**aquifer_data(spec.aquifer)) - elements = {} - observations = defaultdict(list) - for name, element_spec in spec.elements.items(): - if (not element_spec.active) or (len(element_spec.dataframe.index) == 0): - continue - - elementtype = element_spec.elementtype - if elementtype not in MAPPING: - msg = ( - f'Invalid element specification "{elementtype}". ' - f'Available types are: {", ".join(MAPPING.keys())}.' - ) - raise KeyError(msg) - - f_to_kwargs, element = MAPPING[elementtype] - for i, kwargs in enumerate(f_to_kwargs(element_spec)): - if elementtype == "Observation": - observations[name].append(kwargs) - else: - kwargs["model"] = model - elements[f"{name}_{i}"] = element(**kwargs) - - return model, elements, observations - - -def convert_to_script(spec: TimmlModelSpecification) -> str: - """ - Convert model specification to an equivalent Python script. - """ - modelkwargs = dict_to_kwargs_code(aquifer_data(spec.aquifer)) - - observations = {} - strings = [ - "import numpy as np", - "import timml", - f"model = timml.ModelMaq({modelkwargs})", - ] - for name, element_spec in spec.elements.items(): - elementtype = element_spec.elementtype - - if elementtype not in MAPPING: - msg = ( - f'Invalid element specification "{elementtype}". ' - f'Available types are: {", ".join(MAPPING.keys())}.' 
- ) - raise KeyError(msg) - - f_to_kwargs, element = MAPPING[elementtype] - for i, kwargs in enumerate(f_to_kwargs(element_spec)): - if elementtype == "Observation": - kwargs.pop("label") - kwargs = dict_to_kwargs_code(kwargs) - observations[f"observation_{sanitized(name)}_{i}"] = kwargs - else: - kwargs["model"] = "model" - kwargs = dict_to_kwargs_code(kwargs) - strings.append( - f"{sanitized(name)}_{i} = timml.{element.__name__}({kwargs})" - ) - - strings.append("model.solve()") - - xg, yg = headgrid_code(spec.domain) - strings.append(f"head = model.headgrid(xg={xg}, yg={yg})") - - # Add all the individual observation points - for name, kwargs in observations.items(): - strings.append(f"{name} = model.head({kwargs})") - - return black.format_str("\n".join(strings), mode=black.FileMode()) diff --git a/plugin/qgistim/widgets/compute_widget.py b/plugin/qgistim/widgets/compute_widget.py index f6bab78..af238bc 100644 --- a/plugin/qgistim/widgets/compute_widget.py +++ b/plugin/qgistim/widgets/compute_widget.py @@ -240,16 +240,15 @@ def compute(self) -> None: GeoPackage dataset. """ cellsize = self.cellsize_spin_box.value() - transient = self.transient_combo_box.currentText().lower() - is_transient = transient == "transient" + transient = self.transient_combo_box.currentText().lower() == "transient" path = Path(self.output_line_edit.text()).absolute().with_suffix(".json") self.parent.dataset_widget.convert_to_json( - path, cellsize=cellsize, transient=is_transient + path, cellsize=cellsize, transient=transient ) data = { "operation": "compute", "path": str(path), - "mode": transient, + "transient": transient, } # import json # print(json.dumps(data)) @@ -263,7 +262,6 @@ def compute(self) -> None: # task.finished(result) # Remove the output layers from QGIS, otherwise they cannot be overwritten. 
- return gpkg_path = str(path) for layer in QgsProject.instance().mapLayers().values(): if Path(gpkg_path) == Path(layer.source()): diff --git a/plugin/qgistim/widgets/dataset_widget.py b/plugin/qgistim/widgets/dataset_widget.py index 6a0e505..82544d1 100644 --- a/plugin/qgistim/widgets/dataset_widget.py +++ b/plugin/qgistim/widgets/dataset_widget.py @@ -483,9 +483,14 @@ def convert_to_json(self, path: str, cellsize: float, transient: bool) -> None: return json_data = to_json(data, cellsize=cellsize) - # Collect EPSG code. - # TODO: use toWkt() instead? - json_data["crs"] = self.parent.crs.authid() + crs = self.parent.crs + organization, srs_id = crs.authid().split(":") + json_data["crs"] = { + "description": crs.description(), + "organization": organization, + "srs_id": srs_id, + "wkt": crs.toWkt(), + } with open(path, "w") as fp: json.dump(json_data, fp=fp, indent=4) From b3a2ef7c1cc483db87bd58d6aff23ff106bda5d7 Mon Sep 17 00:00:00 2001 From: Huite Bootsma Date: Wed, 27 Sep 2023 18:53:49 +0200 Subject: [PATCH 13/41] Write observations to geopackage --- gistim/compute.py | 4 ++-- gistim/geopackage.py | 56 +++++++++++++++++++++++++++++++++----------- 2 files changed, 44 insertions(+), 16 deletions(-) diff --git a/gistim/compute.py b/gistim/compute.py index 218df67..3e31084 100644 --- a/gistim/compute.py +++ b/gistim/compute.py @@ -105,8 +105,8 @@ def timml_head_observations( x = kwargs["x"] y = kwargs["y"] heads.append(model.head(x=x, y=y)) - d["geometry"].append({"type": "point", "coordinates": [x, y]}) - d["labels"].append(kwargs["label"]) + d["geometry"].append({"type": "Point", "coordinates": [x, y]}) + d["label"].append(kwargs["label"]) for i, layerhead in enumerate(np.vstack(heads).T): d[f"head_layer{i}"] = layerhead return pd.DataFrame(d) diff --git a/gistim/geopackage.py b/gistim/geopackage.py index 66b3a64..f6f2eb8 100644 --- a/gistim/geopackage.py +++ b/gistim/geopackage.py @@ -2,11 +2,12 @@ Utilities to write data to a geopackage. 
""" import itertools +import shutil import sqlite3 from pathlib import Path -from typing import Dict, List, NamedTuple +from typing import Dict, List, NamedTuple, Tuple -import geomet +import geomet.geopackage import pandas as pd @@ -89,34 +90,60 @@ def create_gpkg_geometry_columns( ) -def points_bounding_box(points) -> BoundingBox: - x = [point[0] for point in points] - y = [point[1] for point in points] +def collect_bounding_box(features, geometry_type) -> BoundingBox: + if geometry_type == "Point": + x = [] + y = [] + for point in features: + coordinates = point["coordinates"] + x.append(coordinates[0]) + y.append(coordinates[1]) + else: + x, y = zip( + *itertools.chain.from_iterable(line["coordinates"] for line in features) + ) return BoundingBox(xmin=min(x), ymin=min(y), xmax=max(x), ymax=max(y)) -def lines_bounding_box(lines) -> BoundingBox: - x, y = zip(*itertools.chain.from_iterable(line["coordinates"] for line in lines)) - return BoundingBox(xmin=min(x), ymin=min(y), xmax=max(x), ymax=max(y)) +def process_table(dataframe: pd.DataFrame) -> Tuple[pd.DataFrame, BoundingBox, str]: + geometry = dataframe.pop("geometry").to_numpy() + geometry_type = set(f["type"] for f in geometry) + if len(geometry_type) != 1: + raise ValueError( + f"Table should contain exactly one geometry type. Received: {geometry_type}" + ) + # Get first (and only) member of set. + geometry_type = next(iter(geometry_type)) + bounding_box = collect_bounding_box(geometry, geometry_type) + dataframe["geom"] = [geomet.geopackage.dumps(f) for f in geometry] + return dataframe, bounding_box, geometry_type def write_geopackage( tables: Dict[str, pd.DataFrame], crs: CoordinateReferenceSystem, path: Path ) -> None: + # We write all tables to a temporary GeoPackage with a dot prefix, and at + # the end move this over the target file. This does not throw a + # PermissionError if the file is open in QGIS. 
+ gpkg_path = path.with_suffix(".output.gpkg") + temp_path = gpkg_path.with_stem(".") + # avoid adding tables to existing geopackage. + temp_path.unlink(missing_ok=True) + try: - connection = sqlite3.connect(database=path.with_suffix(".output.gpkg")) + connection = sqlite3.connect(database=temp_path) connection.execute(f"PRAGMA application_id = {APPLICATION_ID};") connection.execute(f"PRAGMA user_version = {USER_VERSION};") table_names = [] geometry_types = [] bounding_boxes = [] - for layername, layerdata in tables.items(): - # TODO: - # * gather bounding boxes - # * gather geometry types - # * convert to geopackage WKB using geomet + for layername, dataframe in tables.items(): + dataframe, bounding_box, geometry_type = process_table(dataframe) table_names.append(layername) + geometry_types.append(geometry_type) + bounding_boxes.append(bounding_box) + dataframe.to_sql(layername, con=connection) # Create mandatory geopackage tables. gpkg_contents = create_gpkg_contents( @@ -137,4 +164,5 @@ def write_geopackage( connection.commit() connection.close() + shutil.move(temp_path, gpkg_path) return From cda908da2511084c0548c4c3be6e0bb9915830f7 Mon Sep 17 00:00:00 2001 From: Huite Bootsma Date: Thu, 28 Sep 2023 07:57:09 +0200 Subject: [PATCH 14/41] Get compute button running again --- gistim/__main__.py | 2 +- gistim/compute.py | 2 +- plugin/qgistim/core/elements/uniform_flow.py | 7 +++++-- plugin/qgistim/core/extractor.py | 5 ++++- plugin/qgistim/widgets/compute_widget.py | 6 +++--- 5 files changed, 14 insertions(+), 8 deletions(-) diff --git a/gistim/__main__.py b/gistim/__main__.py index 96297e7..2e12e0a 100644 --- a/gistim/__main__.py +++ b/gistim/__main__.py @@ -84,7 +84,7 @@ def handle(line) -> None: if operation == "compute": gistim.compute.compute( path=data["path"], - mode=data["mode"], + transient=data["transient"], ) response = "Computation of {path}".format(**data) elif operation == "extract": diff --git a/gistim/compute.py b/gistim/compute.py index 
3e31084..939b590 100644 --- a/gistim/compute.py +++ b/gistim/compute.py @@ -99,7 +99,7 @@ def initialize_timml(data): def timml_head_observations( model: timml.Model, observations: Dict ) -> Dict[str, pd.DataFrame]: - d = {"geometry": [], "labels": []} + d = {"geometry": [], "label": []} heads = [] for kwargs in observations: x = kwargs["x"] diff --git a/plugin/qgistim/core/elements/uniform_flow.py b/plugin/qgistim/core/elements/uniform_flow.py index 11f91e5..571fed1 100644 --- a/plugin/qgistim/core/elements/uniform_flow.py +++ b/plugin/qgistim/core/elements/uniform_flow.py @@ -22,5 +22,8 @@ class UniformFlow(Element): schema = UniformFlowSchema() def process_timml_row(self, row, other=None): - # No renaming or changes required - return row + return { + "slope": row["slope"], + "angle": row["angle"], + "label": row["label"], + } diff --git a/plugin/qgistim/core/extractor.py b/plugin/qgistim/core/extractor.py index 3cfd0a9..396497b 100644 --- a/plugin/qgistim/core/extractor.py +++ b/plugin/qgistim/core/extractor.py @@ -32,7 +32,10 @@ def to_dict(cls, layer) -> List[Dict[str, Any]]: for key, value in data.items(): if value == NULL: data[key] = None - data["centroid"], data["geometry"] = cls.extract_coordinates(feature) + geomtype = layer.geometryType() + # Skip if no geometry is present. 
+ if geomtype != geomtype.Null: + data["centroid"], data["geometry"] = cls.extract_coordinates(feature) features.append(data) return features diff --git a/plugin/qgistim/widgets/compute_widget.py b/plugin/qgistim/widgets/compute_widget.py index af238bc..7a0444e 100644 --- a/plugin/qgistim/widgets/compute_widget.py +++ b/plugin/qgistim/widgets/compute_widget.py @@ -59,9 +59,9 @@ def finished(self, result): self.parent.set_interpreter_interaction(True) if result: self.push_success_message() - self.parent.load_mesh_result(self.data["outpath"]) - self.parent.load_raster_result(self.data["outpath"]) - self.parent.load_vector_result(self.data["outpath"]) + self.parent.load_mesh_result(self.data["path"]) + self.parent.load_raster_result(self.data["path"]) + self.parent.load_vector_result(self.data["path"]) else: self.push_failure_message() return From 3c05195149866cfe130896aa69e93d82a7b00d73 Mon Sep 17 00:00:00 2001 From: Huite Bootsma Date: Thu, 28 Sep 2023 09:18:34 +0200 Subject: [PATCH 15/41] All TimML elements running in compute. Do not compute in case of validation errors. 
--- gistim/compute.py | 5 +++-- plugin/qgistim/core/elements/building_pit.py | 2 ++ plugin/qgistim/core/elements/headwell.py | 6 +++--- .../core/elements/leaky_building_pit.py | 2 ++ .../core/elements/polygon_area_sink.py | 2 +- plugin/qgistim/core/schemata.py | 7 +++++++ plugin/qgistim/widgets/compute_widget.py | 6 +++++- plugin/qgistim/widgets/dataset_widget.py | 21 ++++++++++++++++--- 8 files changed, 41 insertions(+), 10 deletions(-) diff --git a/gistim/compute.py b/gistim/compute.py index 939b590..ccade94 100644 --- a/gistim/compute.py +++ b/gistim/compute.py @@ -19,7 +19,7 @@ "CircAreaSink": timml.CircAreaSink, "Well": timml.Well, "HeadWell": timml.HeadWell, - "PolygonInhom": timml.PolygonInhomMaq, + "PolygonInhomMaq": timml.PolygonInhomMaq, "HeadLineSinkString": timml.HeadLineSinkString, "LineSinkDitchString": timml.LineSinkDitchString, "LeakyLineDoubletString": timml.LeakyLineDoubletString, @@ -173,7 +173,8 @@ def compute_steady( tables[layername] = timml_head_observations(timml_model, content["data"]) # TODO: Compute discharges... 
- write_geopackage(tables, crs, path) + if tables: + write_geopackage(tables, crs, path) return diff --git a/plugin/qgistim/core/elements/building_pit.py b/plugin/qgistim/core/elements/building_pit.py index 14a842d..939077b 100644 --- a/plugin/qgistim/core/elements/building_pit.py +++ b/plugin/qgistim/core/elements/building_pit.py @@ -7,6 +7,7 @@ from qgistim.core.schemata import ( AllGreaterEqual, AllRequired, + AtleastOneTrue, Membership, OffsetAllRequired, OptionalFirstOnly, @@ -36,6 +37,7 @@ class AssociatedBuildingPitSchema(ElementSchema): "aquifer_k": AllRequired(Positive()), "semiconf_top": OptionalFirstOnly(), "semiconf_head": OptionalFirstOnly(), + "wall_in_layer": AllRequired(AtleastOneTrue()), } timml_consistency_schemata = ( SemiConfined(), diff --git a/plugin/qgistim/core/elements/headwell.py b/plugin/qgistim/core/elements/headwell.py index 1f1c870..adfbcf8 100644 --- a/plugin/qgistim/core/elements/headwell.py +++ b/plugin/qgistim/core/elements/headwell.py @@ -19,7 +19,7 @@ class HeadWellSchema(ElementSchema): "head": Required(), "radius": Required(Positive()), "resistance": Required(Positive()), - "layer": Required(Membership("aquifer_layers")), + "layer": Required(Membership("aquifer layers")), } ttim_consistency_schemata = ( AllOrNone(("time_start", "time_end", "head_transient")), @@ -72,9 +72,9 @@ def process_timml_row(self, row, other=None) -> Dict[str, Any]: return { "xw": x, "yw": y, - "hw": row["discharge"], + "hw": row["head"], "rw": row["radius"], - "res": row["head"], + "res": row["resistance"], "layers": row["layer"], "label": row["label"], } diff --git a/plugin/qgistim/core/elements/leaky_building_pit.py b/plugin/qgistim/core/elements/leaky_building_pit.py index b366795..83aba68 100644 --- a/plugin/qgistim/core/elements/leaky_building_pit.py +++ b/plugin/qgistim/core/elements/leaky_building_pit.py @@ -7,6 +7,7 @@ from qgistim.core.schemata import ( AllGreaterEqual, AllRequired, + AtleastOneTrue, Membership, OffsetAllRequired, 
OptionalFirstOnly, @@ -38,6 +39,7 @@ class AssociatedLeakyBuildingPitchema(ElementSchema): "semiconf_top": OptionalFirstOnly(), "semiconf_head": OptionalFirstOnly(), "resistance": RequiredFirstOnly(), + "wall_in_layer": AllRequired(AtleastOneTrue()), } timml_consistency_schemata = ( SemiConfined(), diff --git a/plugin/qgistim/core/elements/polygon_area_sink.py b/plugin/qgistim/core/elements/polygon_area_sink.py index b9d823e..cf62e16 100644 --- a/plugin/qgistim/core/elements/polygon_area_sink.py +++ b/plugin/qgistim/core/elements/polygon_area_sink.py @@ -45,6 +45,6 @@ def process_timml_row(self, row, other): "xy": self.polygon_xy(row), "order": row["order"], "ndeg": row["ndegrees"], - "rate": row["rate"], + "N": row["rate"], **aquifer_data, } diff --git a/plugin/qgistim/core/schemata.py b/plugin/qgistim/core/schemata.py index 594f008..a13bd78 100644 --- a/plugin/qgistim/core/schemata.py +++ b/plugin/qgistim/core/schemata.py @@ -296,6 +296,13 @@ def validate(self, data, _=None) -> MaybeError: return None +class AtleastOneTrue(IterableSchema): + def validate(self, data, _=None) -> MaybeError: + if not any(value for value in data): + return "Atleast one row value must be true." + return None + + # Consistency schemata diff --git a/plugin/qgistim/widgets/compute_widget.py b/plugin/qgistim/widgets/compute_widget.py index 7a0444e..7b5162b 100644 --- a/plugin/qgistim/widgets/compute_widget.py +++ b/plugin/qgistim/widgets/compute_widget.py @@ -242,9 +242,13 @@ def compute(self) -> None: cellsize = self.cellsize_spin_box.value() transient = self.transient_combo_box.currentText().lower() == "transient" path = Path(self.output_line_edit.text()).absolute().with_suffix(".json") - self.parent.dataset_widget.convert_to_json( + invalid_input = self.parent.dataset_widget.convert_to_json( path, cellsize=cellsize, transient=transient ) + # Early return in case some problems are found. 
+ if invalid_input: + return + data = { "operation": "compute", "path": str(path), diff --git a/plugin/qgistim/widgets/dataset_widget.py b/plugin/qgistim/widgets/dataset_widget.py index 82544d1..1c252c8 100644 --- a/plugin/qgistim/widgets/dataset_widget.py +++ b/plugin/qgistim/widgets/dataset_widget.py @@ -477,10 +477,25 @@ def convert_to_python(self, transient: bool) -> None: ) return - def convert_to_json(self, path: str, cellsize: float, transient: bool) -> None: + def convert_to_json(self, path: str, cellsize: float, transient: bool) -> bool: + """ + Parameters + ---------- + path: str + Path to JSON file to write. + cellsize: float + Cell size to use to compute the head grid. + transient: bool + Steady-state (False) or transient (True). + + Returns + ------- + invalid_input: bool + Whether validation has succeeded. + """ data = self._convert(transient=transient) if data is None: - return + return True json_data = to_json(data, cellsize=cellsize) crs = self.parent.crs @@ -499,4 +514,4 @@ def convert_to_json(self, path: str, cellsize: float, transient: bool) -> None: text=f"Converted geopackage to JSON: {path}", level=Qgis.Info, ) - return + return False From d14a74d3743cbca8e35e54cab1b6bfa8c92fdca7 Mon Sep 17 00:00:00 2001 From: Huite Bootsma Date: Sun, 1 Oct 2023 18:35:30 +0200 Subject: [PATCH 16/41] Add discharge wells, linesinks Make .renderer a classmethod instead of property --- gistim/compute.py | 52 +++++++++- gistim/geopackage.py | 2 +- plugin/qgistim/core/elements/building_pit.py | 6 +- .../core/elements/circular_area_sink.py | 6 +- plugin/qgistim/core/elements/constant.py | 6 +- plugin/qgistim/core/elements/domain.py | 6 +- plugin/qgistim/core/elements/element.py | 6 +- .../qgistim/core/elements/head_line_sink.py | 6 +- plugin/qgistim/core/elements/headwell.py | 6 +- .../core/elements/impermeable_line_doublet.py | 6 +- .../core/elements/leaky_building_pit.py | 6 +- .../core/elements/leaky_line_doublet.py | 6 +- 
.../qgistim/core/elements/line_sink_ditch.py | 6 +- plugin/qgistim/core/elements/observation.py | 6 +- .../core/elements/polygon_area_sink.py | 6 +- .../core/elements/polygon_inhomogeneity.py | 6 +- .../elements/polygon_semi_confined_top.py | 6 +- plugin/qgistim/core/elements/well.py | 6 +- plugin/qgistim/core/tim_elements.py | 96 +++++++++---------- plugin/qgistim/widgets/compute_widget.py | 12 +-- plugin/qgistim/widgets/dataset_widget.py | 2 +- plugin/qgistim/widgets/tim_widget.py | 2 +- 22 files changed, 154 insertions(+), 108 deletions(-) diff --git a/gistim/compute.py b/gistim/compute.py index ccade94..c442802 100644 --- a/gistim/compute.py +++ b/gistim/compute.py @@ -1,7 +1,7 @@ import json import pathlib from collections import defaultdict -from typing import Dict, Union +from typing import Any, Dict, Union import numpy as np import pandas as pd @@ -112,6 +112,12 @@ def timml_head_observations( return pd.DataFrame(d) +def timml_discharges(elements: Dict[str, Any]) -> Dict[str, pd.DataFrame]: + discharges = {} + for layername, content in elements.items(): + first_element = content[0] + + def timml_headgrid(model: timml.Model, xmin, xmax, ymin, ymax, spacing) -> xr.DataArray: """ Compute the headgrid of the TimML model, and store the results @@ -146,6 +152,47 @@ def timml_headgrid(model: timml.Model, xmin, xmax, ymin, ymax, spacing) -> xr.Da ) +def extract_discharges(elements, nlayers): + tables = {} + for layername, content in elements.items(): + sample = content[0] + + if isinstance(sample, timml.WellBase): + table_rows = [] + for well in content: + row = {f"discharge_layer{i}": q for i, q in enumerate(well.discharge())} + row["geometry"] = { + "type": "Point", + "coordinates": [well.xc[0], well.yc[0]], + } + table_rows.append(row) + tables[f"discharge-{layername}"] = pd.DataFrame.from_records(table_rows) + + elif isinstance(sample, (timml.LineSinkDitchString, timml.HeadLineSinkString)): + table_rows = [] + for linestring in content: + # Workaround for: 
https://github.com/mbakker7/timml/issues/85 + discharges = np.zeros((linestring.nls, nlayers)) + Q = ( + (linestring.parameters[:, 0] * linestring.dischargeinf()) + .reshape((linestring.nls, linestring.nlayers, linestring.order + 1)) + .sum(axis=2) + ) + discharges[:, linestring.layers[0]] = Q + + xy = linestring.xy + for q_layered, vertex0, vertex1 in zip(discharges, xy[:-1], xy[1:]): + row = {f"discharge_layer{i}": q for i, q in enumerate(q_layered)} + row["geometry"] = { + "type": "LineString", + "coordinates": [vertex0, vertex1], + } + table_rows.append(row) + tables[f"discharge-{layername}"] = pd.DataFrame.from_records(table_rows) + + return tables + + def compute_steady( path: Union[pathlib.Path, str], ) -> None: @@ -167,11 +214,10 @@ def compute_steady( write_ugrid(head, crs, path) # Compute observations and discharge, and write to geopackage. - tables = {} + tables = extract_discharges(elements, timml_model.aq.nlayers) if observations: for layername, content in observations.items(): tables[layername] = timml_head_observations(timml_model, content["data"]) - # TODO: Compute discharges... 
if tables: write_geopackage(tables, crs, path) diff --git a/gistim/geopackage.py b/gistim/geopackage.py index f6f2eb8..163bac6 100644 --- a/gistim/geopackage.py +++ b/gistim/geopackage.py @@ -37,7 +37,7 @@ def create_gpkg_contents( return pd.DataFrame( data={ "table_name": table_names, - "data_type": ["features"], + "data_type": "features", "identifier": table_names, "description": "", "last_change": pd.Timestamp.now(), diff --git a/plugin/qgistim/core/elements/building_pit.py b/plugin/qgistim/core/elements/building_pit.py index 939077b..897ce6e 100644 --- a/plugin/qgistim/core/elements/building_pit.py +++ b/plugin/qgistim/core/elements/building_pit.py @@ -78,9 +78,9 @@ class BuildingPit(AssociatedElement): schema = BuildingPitSchema() assoc_schema = AssociatedBuildingPitSchema() - @property - def renderer(self) -> QgsSingleSymbolRenderer: - return self.polygon_renderer( + @classmethod + def renderer(cls) -> QgsSingleSymbolRenderer: + return cls.polygon_renderer( color=TRANSPARENT_RED, color_border=RED, width_border="0.75" ) diff --git a/plugin/qgistim/core/elements/circular_area_sink.py b/plugin/qgistim/core/elements/circular_area_sink.py index 486cd60..49f7087 100644 --- a/plugin/qgistim/core/elements/circular_area_sink.py +++ b/plugin/qgistim/core/elements/circular_area_sink.py @@ -63,9 +63,9 @@ class CircularAreaSink(TransientElement): ) schema = CircularAreaSinkSchema() - @property - def renderer(self): - return self.polygon_renderer( + @classmethod + def renderer(cls): + return cls.polygon_renderer( color=TRANSPARENT_GREEN, color_border=GREEN, width_border="0.75" ) diff --git a/plugin/qgistim/core/elements/constant.py b/plugin/qgistim/core/elements/constant.py index 2a9ea6f..f6361ce 100644 --- a/plugin/qgistim/core/elements/constant.py +++ b/plugin/qgistim/core/elements/constant.py @@ -24,9 +24,9 @@ class Constant(Element): ) schema = ConstantSchema() - @property - def renderer(self) -> QgsSingleSymbolRenderer: - return self.marker_renderer(color=RED, 
name="star", size="5") + @classmethod + def renderer(cls) -> QgsSingleSymbolRenderer: + return cls.marker_renderer(color=RED, name="star", size="5") def process_timml_row(self, row, other=None): x, y = self.point_xy(row) diff --git a/plugin/qgistim/core/elements/domain.py b/plugin/qgistim/core/elements/domain.py index c222b22..f0a1994 100644 --- a/plugin/qgistim/core/elements/domain.py +++ b/plugin/qgistim/core/elements/domain.py @@ -32,12 +32,12 @@ def __init__(self, path: str, name: str): self.timml_name = f"timml {self.element_type}:Domain" self.ttim_name = "ttim Computation Times:Domain" - @property - def renderer(self) -> QgsSingleSymbolRenderer: + @classmethod + def renderer(cls) -> QgsSingleSymbolRenderer: """ Results in transparent fill, with a medium thick black border line. """ - return self.polygon_renderer( + return cls.polygon_renderer( color="255,0,0,0", color_border=BLACK, width_border="0.75" ) diff --git a/plugin/qgistim/core/elements/element.py b/plugin/qgistim/core/elements/element.py index b07a412..c8a8feb 100644 --- a/plugin/qgistim/core/elements/element.py +++ b/plugin/qgistim/core/elements/element.py @@ -28,7 +28,7 @@ and edit its data. Some elements require specific rendering in QGIS (e.g. no fill polygons), which -are supplied by the `.renderer` property. +are supplied by the `.renderer()` staticmethod. The coupling of separate tables (geometry table and time series table) is only explicit in the Dataset Tree. 
The only way of knowing that tables are @@ -284,8 +284,8 @@ def polygon_renderer(**kwargs): symbol = QgsFillSymbol.createSimple(kwargs) return QgsSingleSymbolRenderer(symbol) - @property - def renderer(self): + @classmethod + def renderer(cls): return None def timml_layer_from_geopackage(self) -> QgsVectorLayer: diff --git a/plugin/qgistim/core/elements/head_line_sink.py b/plugin/qgistim/core/elements/head_line_sink.py index f24b837..bcf95db 100644 --- a/plugin/qgistim/core/elements/head_line_sink.py +++ b/plugin/qgistim/core/elements/head_line_sink.py @@ -67,9 +67,9 @@ class HeadLineSink(TransientElement): ) schema = HeadLineSinkSchema() - @property - def renderer(self) -> QgsSingleSymbolRenderer: - return self.line_renderer(color=BLUE, width="0.75") + @classmethod + def renderer(cls) -> QgsSingleSymbolRenderer: + return cls.line_renderer(color=BLUE, width="0.75") def process_timml_row(self, row, other=None): return { diff --git a/plugin/qgistim/core/elements/headwell.py b/plugin/qgistim/core/elements/headwell.py index adfbcf8..ea5746d 100644 --- a/plugin/qgistim/core/elements/headwell.py +++ b/plugin/qgistim/core/elements/headwell.py @@ -63,9 +63,9 @@ class HeadWell(TransientElement): ) schema = HeadWellSchema() - @property - def renderer(self) -> QgsSingleSymbolRenderer: - return self.marker_renderer(color=BLUE, size="3") + @classmethod + def renderer(cls) -> QgsSingleSymbolRenderer: + return cls.marker_renderer(color=BLUE, size="3") def process_timml_row(self, row, other=None) -> Dict[str, Any]: x, y = self.point_xy(row) diff --git a/plugin/qgistim/core/elements/impermeable_line_doublet.py b/plugin/qgistim/core/elements/impermeable_line_doublet.py index b27ee87..29266b0 100644 --- a/plugin/qgistim/core/elements/impermeable_line_doublet.py +++ b/plugin/qgistim/core/elements/impermeable_line_doublet.py @@ -25,9 +25,9 @@ class ImpermeableLineDoublet(Element): } schema = ImpermeableLineDoubletSchema() - @property - def renderer(self) -> QgsSingleSymbolRenderer: - 
return self.line_renderer(color=RED, width="0.75") + @classmethod + def renderer(cls) -> QgsSingleSymbolRenderer: + return cls.line_renderer(color=RED, width="0.75") def process_timml_row(self, row, other=None): return { diff --git a/plugin/qgistim/core/elements/leaky_building_pit.py b/plugin/qgistim/core/elements/leaky_building_pit.py index 83aba68..3463826 100644 --- a/plugin/qgistim/core/elements/leaky_building_pit.py +++ b/plugin/qgistim/core/elements/leaky_building_pit.py @@ -81,9 +81,9 @@ class LeakyBuildingPit(AssociatedElement): schema = LeakyBuildingPitSchema() assoc_schema = AssociatedLeakyBuildingPitchema() - @property - def renderer(self) -> QgsSingleSymbolRenderer: - return self.polygon_renderer( + @classmethod + def renderer(cls) -> QgsSingleSymbolRenderer: + return cls.polygon_renderer( color=TRANSPARENT_RED, color_border=RED, width_border="0.75", diff --git a/plugin/qgistim/core/elements/leaky_line_doublet.py b/plugin/qgistim/core/elements/leaky_line_doublet.py index b1b8259..acfc375 100644 --- a/plugin/qgistim/core/elements/leaky_line_doublet.py +++ b/plugin/qgistim/core/elements/leaky_line_doublet.py @@ -27,9 +27,9 @@ class LeakyLineDoublet(Element): } schema = LeakyLineDoubletSchema() - @property - def renderer(self) -> QgsSingleSymbolRenderer: - return self.line_renderer(color=RED, width="0.75", outline_style="dash") + @classmethod + def renderer(cls) -> QgsSingleSymbolRenderer: + return cls.line_renderer(color=RED, width="0.75", outline_style="dash") def process_timml_row(self, row, other=None): return { diff --git a/plugin/qgistim/core/elements/line_sink_ditch.py b/plugin/qgistim/core/elements/line_sink_ditch.py index eb07d8c..8d679d7 100644 --- a/plugin/qgistim/core/elements/line_sink_ditch.py +++ b/plugin/qgistim/core/elements/line_sink_ditch.py @@ -67,9 +67,9 @@ class LineSinkDitch(TransientElement): ) schema = LineSinkDitchSchema() - @property - def renderer(self) -> QgsSingleSymbolRenderer: - return self.line_renderer(color=GREEN, 
width="0.75") + @classmethod + def renderer(cls) -> QgsSingleSymbolRenderer: + return cls.line_renderer(color=GREEN, width="0.75") def process_timml_row(self, row, other=None): return { diff --git a/plugin/qgistim/core/elements/observation.py b/plugin/qgistim/core/elements/observation.py index bdacc11..ba54a6b 100644 --- a/plugin/qgistim/core/elements/observation.py +++ b/plugin/qgistim/core/elements/observation.py @@ -35,9 +35,9 @@ class Observation(TransientElement): transient_columns = ("timeseries_id",) schema = ObservationSchema() - @property - def renderer(self) -> QgsSingleSymbolRenderer: - return self.marker_renderer(color=LIGHT_BLUE, name="triangle", size="3") + @classmethod + def renderer(cls) -> QgsSingleSymbolRenderer: + return cls.marker_renderer(color=LIGHT_BLUE, name="triangle", size="3") def process_timml_row(self, row, other=None): x, y = self.point_xy(row) diff --git a/plugin/qgistim/core/elements/polygon_area_sink.py b/plugin/qgistim/core/elements/polygon_area_sink.py index cf62e16..0c3757a 100644 --- a/plugin/qgistim/core/elements/polygon_area_sink.py +++ b/plugin/qgistim/core/elements/polygon_area_sink.py @@ -29,9 +29,9 @@ class PolygonAreaSink(Element): } schema = PolygonAreaSinkSchema() - @property - def renderer(self) -> QgsSingleSymbolRenderer: - return self.polygon_renderer( + @classmethod + def renderer(cls) -> QgsSingleSymbolRenderer: + return cls.polygon_renderer( color=TRANSPARENT_GREEN, color_border=GREEN, width_border="0.75" ) diff --git a/plugin/qgistim/core/elements/polygon_inhomogeneity.py b/plugin/qgistim/core/elements/polygon_inhomogeneity.py index a17b2fa..55cd9c6 100644 --- a/plugin/qgistim/core/elements/polygon_inhomogeneity.py +++ b/plugin/qgistim/core/elements/polygon_inhomogeneity.py @@ -77,9 +77,9 @@ class PolygonInhomogeneity(AssociatedElement): schema = PolygonInhomogeneitySchema() assoc_schema = AssociatedPolygonInhomogeneitySchema() - @property - def renderer(self) -> QgsSingleSymbolRenderer: - return 
self.polygon_renderer( + @classmethod + def renderer(cls) -> QgsSingleSymbolRenderer: + return cls.polygon_renderer( color=TRANSPARENT_GREY, color_border=GREY, width_border="0.75" ) diff --git a/plugin/qgistim/core/elements/polygon_semi_confined_top.py b/plugin/qgistim/core/elements/polygon_semi_confined_top.py index bd84489..0d81dd2 100644 --- a/plugin/qgistim/core/elements/polygon_semi_confined_top.py +++ b/plugin/qgistim/core/elements/polygon_semi_confined_top.py @@ -33,9 +33,9 @@ class PolygonSemiConfinedTop(Element): } schema = PolygonSemiConfinedTopSchema() - @property - def renderer(self) -> QgsSingleSymbolRenderer: - return self.polygon_renderer( + @classmethod + def renderer(cls) -> QgsSingleSymbolRenderer: + return cls.polygon_renderer( color=TRANSPARENT_BLUE, color_border=BLUE, width_border="0.75" ) diff --git a/plugin/qgistim/core/elements/well.py b/plugin/qgistim/core/elements/well.py index 34e906b..c390008 100644 --- a/plugin/qgistim/core/elements/well.py +++ b/plugin/qgistim/core/elements/well.py @@ -78,9 +78,9 @@ class Well(TransientElement): ) schema = WellSchema() - @property - def renderer(self) -> QgsSingleSymbolRenderer: - return self.marker_renderer(color=GREEN, size="3") + @classmethod + def renderer(cls) -> QgsSingleSymbolRenderer: + return cls.marker_renderer(color=GREEN, size="3") def process_timml_row(self, row, other=None) -> Dict[str, Any]: x, y = self.point_xy(row) diff --git a/plugin/qgistim/core/tim_elements.py b/plugin/qgistim/core/tim_elements.py index 7bac796..f49aa06 100644 --- a/plugin/qgistim/core/tim_elements.py +++ b/plugin/qgistim/core/tim_elements.py @@ -28,7 +28,7 @@ and edit its data. Some elements require specific rendering in QGIS (e.g. no fill polygons), which -are supplied by the `.renderer` property. +are supplied by the `.renderer()` staticmethod. The coupling of separate tables (geometry table and time series table) is only explicit in the Dataset Tree. 
The only way of knowing that tables are @@ -219,8 +219,8 @@ def polygon_renderer(**kwargs): symbol = QgsFillSymbol.createSimple(kwargs) return QgsSingleSymbolRenderer(symbol) - @property - def renderer(self): + @classmethod + def renderer(cls): return None def timml_layer_from_geopackage(self) -> QgsVectorLayer: @@ -352,12 +352,12 @@ def __init__(self, path: str, name: str): self.timml_name = f"timml {self.element_type}:Domain" self.ttim_name = "ttim Computation Times:Domain" - @property - def renderer(self) -> QgsSingleSymbolRenderer: + @classmethod + def renderer(cls) -> QgsSingleSymbolRenderer: """ Results in transparent fill, with a medium thick black border line. """ - return self.polygon_renderer( + return cls.polygon_renderer( color="255,0,0,0", color_border=BLACK, width_border="0.75" ) @@ -460,9 +460,9 @@ class Constant(Element): QgsField("label", QVariant.String), ) - @property - def renderer(self) -> QgsSingleSymbolRenderer: - return self.marker_renderer(color=RED, name="star", size="5") + @classmethod + def renderer(cls) -> QgsSingleSymbolRenderer: + return cls.marker_renderer(color=RED, name="star", size="5") class Observation(TransientElement): @@ -484,9 +484,9 @@ class Observation(TransientElement): } transient_columns = ("timeseries_id",) - @property - def renderer(self) -> QgsSingleSymbolRenderer: - return self.marker_renderer(color=LIGHT_BLUE, name="triangle", size="3") + @classmethod + def renderer(cls) -> QgsSingleSymbolRenderer: + return cls.marker_renderer(color=LIGHT_BLUE, name="triangle", size="3") class Well(TransientElement): @@ -525,9 +525,9 @@ class Well(TransientElement): "timeseries_id", ) - @property - def renderer(self) -> QgsSingleSymbolRenderer: - return self.marker_renderer(color=GREEN, size="3") + @classmethod + def renderer(cls) -> QgsSingleSymbolRenderer: + return cls.marker_renderer(color=GREEN, size="3") class HeadWell(TransientElement): @@ -560,9 +560,9 @@ class HeadWell(TransientElement): "timeseries_id", ) - @property - def 
renderer(self) -> QgsSingleSymbolRenderer: - return self.marker_renderer(color=BLUE, size="3") + @classmethod + def renderer(cls) -> QgsSingleSymbolRenderer: + return cls.marker_renderer(color=BLUE, size="3") class HeadLineSink(TransientElement): @@ -595,9 +595,9 @@ class HeadLineSink(TransientElement): "timeseries_id", ) - @property - def renderer(self) -> QgsSingleSymbolRenderer: - return self.line_renderer(color=BLUE, width="0.75") + @classmethod + def renderer(cls) -> QgsSingleSymbolRenderer: + return cls.line_renderer(color=BLUE, width="0.75") class LineSinkDitch(TransientElement): @@ -630,9 +630,9 @@ class LineSinkDitch(TransientElement): "timeseries_id", ) - @property - def renderer(self) -> QgsSingleSymbolRenderer: - return self.line_renderer(color=GREEN, width="0.75") + @classmethod + def renderer(cls) -> QgsSingleSymbolRenderer: + return cls.line_renderer(color=GREEN, width="0.75") class ImpermeableLineDoublet(Element): @@ -647,9 +647,9 @@ class ImpermeableLineDoublet(Element): "order": QgsDefaultValue("4"), } - @property - def renderer(self) -> QgsSingleSymbolRenderer: - return self.line_renderer(color=RED, width="0.75") + @classmethod + def renderer(cls) -> QgsSingleSymbolRenderer: + return cls.line_renderer(color=RED, width="0.75") class LeakyLineDoublet(Element): @@ -665,9 +665,9 @@ class LeakyLineDoublet(Element): "order": QgsDefaultValue("4"), } - @property - def renderer(self) -> QgsSingleSymbolRenderer: - return self.line_renderer(color=RED, width="0.75", outline_style="dash") + @classmethod + def renderer(cls) -> QgsSingleSymbolRenderer: + return cls.line_renderer(color=RED, width="0.75", outline_style="dash") class CircularAreaSink(TransientElement): @@ -694,9 +694,9 @@ class CircularAreaSink(TransientElement): "timeseries_id", ) - @property - def renderer(self): - return self.polygon_renderer( + @classmethod + def renderer(cls): + return cls.polygon_renderer( color=TRANSPARENT_GREEN, color_border=GREEN, width_border="0.75" ) @@ -714,9 +714,9 @@ 
class PolygonAreaSink(Element): "ndegrees": QgsDefaultValue("6"), } - @property - def renderer(self) -> QgsSingleSymbolRenderer: - return self.polygon_renderer( + @classmethod + def renderer(cls) -> QgsSingleSymbolRenderer: + return cls.polygon_renderer( color=TRANSPARENT_GREEN, color_border=GREEN, width_border="0.75" ) @@ -736,9 +736,9 @@ class PolygonSemiConfinedTop(Element): "ndegrees": QgsDefaultValue("6"), } - @property - def renderer(self) -> QgsSingleSymbolRenderer: - return self.polygon_renderer( + @classmethod + def renderer(cls) -> QgsSingleSymbolRenderer: + return cls.polygon_renderer( color=TRANSPARENT_BLUE, color_border=BLUE, width_border="0.75" ) @@ -781,9 +781,9 @@ class PolygonInhomogeneity(AssociatedElement): "aquifer_npor", ) - @property - def renderer(self) -> QgsSingleSymbolRenderer: - return self.polygon_renderer( + @classmethod + def renderer(cls) -> QgsSingleSymbolRenderer: + return cls.polygon_renderer( color=TRANSPARENT_GREY, color_border=GREY, width_border="0.75" ) @@ -847,9 +847,9 @@ class BuildingPit(AbstractBuildingPit): QgsField("semiconf_head", QVariant.Double), ] - @property - def renderer(self) -> QgsSingleSymbolRenderer: - return self.polygon_renderer( + @classmethod + def renderer(cls) -> QgsSingleSymbolRenderer: + return cls.polygon_renderer( color=TRANSPARENT_RED, color_border=RED, width_border="0.75" ) @@ -868,9 +868,9 @@ class LeakyBuildingPit(AbstractBuildingPit): QgsField("semiconf_head", QVariant.Double), ] - @property - def renderer(self) -> QgsSingleSymbolRenderer: - return self.polygon_renderer( + @classmethod + def renderer(cls) -> QgsSingleSymbolRenderer: + return cls.polygon_renderer( color=TRANSPARENT_RED, color_border=RED, width_border="0.75", diff --git a/plugin/qgistim/widgets/compute_widget.py b/plugin/qgistim/widgets/compute_widget.py index 7b5162b..1a398bf 100644 --- a/plugin/qgistim/widgets/compute_widget.py +++ b/plugin/qgistim/widgets/compute_widget.py @@ -33,6 +33,7 @@ from qgis.gui import 
QgsMapLayerComboBox from qgistim.core import geopackage, layer_styling from qgistim.core.dummy_ugrid import write_dummy_ugrid +from qgistim.core.elements import ELEMENTS, parse_name from qgistim.core.processing import mesh_contours from qgistim.core.task import BaseServerTask @@ -445,14 +446,13 @@ def load_vector_result(self, path: Union[Path, str]) -> None: if add: if "timml Observation:" in layername or "ttim Observation" in layername: labels = layer_styling.number_labels("head_layer0") - light_blue = "166,206,227,255" - symbol = QgsMarkerSymbol.createSimple( - dict(color=light_blue, name="triangle", size="3") - ) - renderer = QgsSingleSymbolRenderer(symbol) + elif "discharge-" in layername: + labels = layer_styling.number_labels("discharge_layer0") else: labels = None - renderer = None + + _, element_type, _ = parse_name(layername) + renderer = ELEMENTS[element_type].renderer() self.parent.output_group.add_layer( layer, "vector", renderer=renderer, labels=labels ) diff --git a/plugin/qgistim/widgets/dataset_widget.py b/plugin/qgistim/widgets/dataset_widget.py index 1c252c8..27a2b9b 100644 --- a/plugin/qgistim/widgets/dataset_widget.py +++ b/plugin/qgistim/widgets/dataset_widget.py @@ -293,7 +293,7 @@ def add_item_to_qgis(self, item) -> None: suppress = self.suppress_popup_checkbox.isChecked() # Start adding the layers maplayer = self.parent.input_group.add_layer( - element.timml_layer, "timml", element.renderer, suppress + element.timml_layer, "timml", element.renderer(), suppress ) self.parent.input_group.add_layer(element.ttim_layer, "ttim") self.parent.input_group.add_layer(element.assoc_layer, "timml") diff --git a/plugin/qgistim/widgets/tim_widget.py b/plugin/qgistim/widgets/tim_widget.py index 53e588c..7d7ccc4 100644 --- a/plugin/qgistim/widgets/tim_widget.py +++ b/plugin/qgistim/widgets/tim_widget.py @@ -295,7 +295,7 @@ def add_element(self, element: Any): self.input_group.add_layer( element.timml_layer, "timml", - renderer=element.renderer, + 
renderer=element.renderer(), suppress=suppress, ) self.input_group.add_layer(element.ttim_layer, "ttim") From fa068ee43ac9dcde8658d82bbd42ffd37137026c Mon Sep 17 00:00:00 2001 From: Huite Bootsma Date: Tue, 24 Oct 2023 16:57:55 +0200 Subject: [PATCH 17/41] WIP: Allow control over which output --- example.py | 47 +++++++ gistim/compute.py | 23 +++- gistim/ttim_elements.py | 2 +- plugin/qgistim/core/elements/__init__.py | 4 +- plugin/qgistim/core/elements/observation.py | 8 +- plugin/qgistim/core/formatting.py | 10 +- plugin/qgistim/core/processing.py | 15 +- plugin/qgistim/core/tim_elements.py | 6 +- plugin/qgistim/widgets/compute_widget.py | 144 +++++++++++++------- plugin/qgistim/widgets/dataset_widget.py | 42 ++++-- plugin/qgistim/widgets/tim_widget.py | 21 +-- 11 files changed, 223 insertions(+), 99 deletions(-) create mode 100644 example.py diff --git a/example.py b/example.py new file mode 100644 index 0000000..9ff1670 --- /dev/null +++ b/example.py @@ -0,0 +1,47 @@ +# %% +import gistim +import os + +# %% + +os.chdir("c:/tmp/timtests") +gistim.compute_steady(path="test3.json") + +# %% + +import numpy as np +import timml + +# %% + + +t_0 = 0.0 +t_1 = 10.0 +s_l = 0.7 +s_u = 0.3 +f_amp = 1.25 + +s_scaled = (f_amp - 1.0) * (t_1 - t_0) +L_sto = (s_scaled + s_l + s_u) / 2 +# %% + +count_L = np.round(np.log(L_sto / s_l) / np.log(f_amp)) +count_U = np.round(np.log(L_sto / s_u) / np.log(f_amp)) +# %% +count_L = max(np.round(count_L).astype(int) , 0) +count_U = max(np.round(count_U).astype(int) , 0) +# %% + +L_lower = f_amp ** (-count_L) +L_upper = f_amp ** (-count_U) +# %% +L_sto2 = s_scaled / (2 - L_lower - L_upper) +# %% + +L_lower = L_lower * L_sto2 +L_upper = L_upper * L_sto2 +# %% + +lower_dt = L_lower * (f_amp ** np.arange(count_L)) +upper_dt = L_upper * (f_amp ** np.arange(count_U))[::-1] +# %% diff --git a/gistim/compute.py b/gistim/compute.py index c442802..1c4664f 100644 --- a/gistim/compute.py +++ b/gistim/compute.py @@ -199,23 +199,32 @@ def compute_steady( 
with open(path, "r") as f: data = json.load(f) + output_options = data.pop("output_options") + headgrid = data.pop("headgrid") crs = CoordinateReferenceSystem(**data.pop("crs")) observation_names = [ - key for key, value in data.items() if value.get("type") == "Observation" + key for key, value in data.items() if value.get("type") == "Head Observation" ] observations = {name: data.pop(name) for name in observation_names} - headgrid = data.pop("headgrid") timml_model, elements = initialize_timml(data) timml_model.solve() # Compute gridded head data and write to netCDF. - head = timml_headgrid(timml_model, **headgrid) - write_raster(head, crs, path) - write_ugrid(head, crs, path) + if output_options["raster"] or output_options["mesh"]: + head = timml_headgrid(timml_model, **headgrid) + + if output_options["raster"]: + write_raster(head, crs, path) + if output_options["mesh"]: + write_ugrid(head, crs, path) # Compute observations and discharge, and write to geopackage. - tables = extract_discharges(elements, timml_model.aq.nlayers) - if observations: + if output_options["discharge"]: + tables = extract_discharges(elements, timml_model.aq.nlayers) + else: + tables = {} + + if output_options["head_observations"] and observations: for layername, content in observations.items(): tables[layername] = timml_head_observations(timml_model, content["data"]) diff --git a/gistim/ttim_elements.py b/gistim/ttim_elements.py index f6a4f19..6803fb9 100644 --- a/gistim/ttim_elements.py +++ b/gistim/ttim_elements.py @@ -416,7 +416,7 @@ def convert_to_script(spec: TtimModelSpecification) -> str: f_to_kwargs, element = MAPPING[elementtype] for i, kwargs in enumerate(f_to_kwargs(element_spec, modelkwargs["tstart"])): - if elementtype == "Observation": + if elementtype == "Head Observation": kwargs.pop("label") kwargs.pop("observation_id") kwargs = dict_to_kwargs_code(kwargs) diff --git a/plugin/qgistim/core/elements/__init__.py b/plugin/qgistim/core/elements/__init__.py index 
8ed8dcb..f7d1dba 100644 --- a/plugin/qgistim/core/elements/__init__.py +++ b/plugin/qgistim/core/elements/__init__.py @@ -16,7 +16,7 @@ from qgistim.core.elements.leaky_building_pit import LeakyBuildingPit from qgistim.core.elements.leaky_line_doublet import LeakyLineDoublet from qgistim.core.elements.line_sink_ditch import LineSinkDitch -from qgistim.core.elements.observation import Observation +from qgistim.core.elements.observation import HeadObservation from qgistim.core.elements.polygon_area_sink import PolygonAreaSink from qgistim.core.elements.polygon_inhomogeneity import PolygonInhomogeneity from qgistim.core.elements.polygon_semi_confined_top import PolygonSemiConfinedTop @@ -42,7 +42,7 @@ PolygonInhomogeneity, BuildingPit, LeakyBuildingPit, - Observation, + HeadObservation, ) } diff --git a/plugin/qgistim/core/elements/observation.py b/plugin/qgistim/core/elements/observation.py index ba54a6b..8b187d3 100644 --- a/plugin/qgistim/core/elements/observation.py +++ b/plugin/qgistim/core/elements/observation.py @@ -5,7 +5,7 @@ from qgistim.core.schemata import AllRequired, Positive, Required -class ObservationSchema(ElementSchema): +class HeadObservationSchema(ElementSchema): timml_schemata = { "geometry": Required(), } @@ -15,8 +15,8 @@ class ObservationSchema(ElementSchema): } -class Observation(TransientElement): - element_type = "Observation" +class HeadObservation(TransientElement): + element_type = "Head Observation" geometry_type = "Point" timml_attributes = ( QgsField("label", QVariant.String), @@ -33,7 +33,7 @@ class Observation(TransientElement): "timeseries_id": QgsDefaultValue("1"), } transient_columns = ("timeseries_id",) - schema = ObservationSchema() + schema = HeadObservationSchema() @classmethod def renderer(cls) -> QgsSingleSymbolRenderer: diff --git a/plugin/qgistim/core/formatting.py b/plugin/qgistim/core/formatting.py index 16d0252..5b6416b 100644 --- a/plugin/qgistim/core/formatting.py +++ b/plugin/qgistim/core/formatting.py @@ -24,7 
+24,8 @@ "Impermeable Line Doublet": "ImpLineDoubletString", "Building Pit": "BuildingPit", "Leaky Building Pit": "LeakyBuildingPit", - "Observation": "Observation", + "Head Observation": "Head Observation", + "Discharge Observation": "Discharge Observation", } PREFIX = " " @@ -131,7 +132,7 @@ def to_script_string(data: Dict[str, Any]) -> str: timml_name = MAPPING[plugin_name] for i, kwargs in enumerate(element_data): - if plugin_name == "Observation": + if plugin_name == "Head Observation": # Should not be added to the model. kwargs.pop("label") observations.append( @@ -154,7 +155,7 @@ def to_script_string(data: Dict[str, Any]) -> str: return "\n".join(strings) -def to_json(data: Dict[str, Any], cellsize: float) -> Dict[str, Any]: +def to_json(data: Dict[str, Any], cellsize: float, output_options: Dict[str, bool]) -> Dict[str, Any]: """ Take the data and add: @@ -177,6 +178,7 @@ def to_json(data: Dict[str, Any], cellsize: float) -> Dict[str, Any]: json_data = { "ModelMaq": aquifer_data, "headgrid": headgrid_entry(domain_data, cellsize), + "output_options": output_options, } for layername, element_data in data.items(): @@ -184,5 +186,5 @@ def to_json(data: Dict[str, Any], cellsize: float) -> Dict[str, Any]: plugin_name = re.split("timml |ttim ", prefix)[1] timml_name = MAPPING[plugin_name] json_data[layername] = {"type": timml_name, "name": name, "data": element_data} - + return json_data diff --git a/plugin/qgistim/core/processing.py b/plugin/qgistim/core/processing.py index 6086168..59edffa 100644 --- a/plugin/qgistim/core/processing.py +++ b/plugin/qgistim/core/processing.py @@ -23,6 +23,8 @@ QgsVectorLayerTemporalProperties, ) +from qgistim.core import geopackage + def raster_steady_contours( layer: QgsRasterLayer, @@ -199,6 +201,7 @@ def transient_contours( def mesh_contours( + gpkg_path: str, layer: QgsMeshLayer, index: int, name: str, @@ -207,6 +210,14 @@ def mesh_contours( step: float, ) -> QgsVectorLayer: if layer.firstValidTimeStep().isValid(): - return 
transient_contours(layer, index, name, start, stop, step) + vector_layer = transient_contours(layer, index, name, start, stop, step) else: - return steady_contours(layer, index, name, start, stop, step) + vector_layer = steady_contours(layer, index, name, start, stop, step) + + written_layer = geopackage.write_layer( + path=gpkg_path, + layer=vector_layer, + layername=vector_layer.name(), + newfile=False, + ) + return written_layer diff --git a/plugin/qgistim/core/tim_elements.py b/plugin/qgistim/core/tim_elements.py index f49aa06..79f2efd 100644 --- a/plugin/qgistim/core/tim_elements.py +++ b/plugin/qgistim/core/tim_elements.py @@ -465,8 +465,8 @@ def renderer(cls) -> QgsSingleSymbolRenderer: return cls.marker_renderer(color=RED, name="star", size="5") -class Observation(TransientElement): - element_type = "Observation" +class HeadObservation(TransientElement): + element_type = "Head Observation" geometry_type = "Point" timml_attributes = ( QgsField("label", QVariant.String), @@ -897,7 +897,7 @@ def renderer(cls) -> QgsSingleSymbolRenderer: PolygonInhomogeneity, BuildingPit, LeakyBuildingPit, - Observation, + HeadObservation, ) } diff --git a/plugin/qgistim/widgets/compute_widget.py b/plugin/qgistim/widgets/compute_widget.py index 1a398bf..b038972 100644 --- a/plugin/qgistim/widgets/compute_widget.py +++ b/plugin/qgistim/widgets/compute_widget.py @@ -6,7 +6,6 @@ from PyQt5.QtCore import Qt from PyQt5.QtWidgets import ( QCheckBox, - QComboBox, QDoubleSpinBox, QFileDialog, QGroupBox, @@ -20,12 +19,10 @@ from qgis.core import ( QgsApplication, QgsMapLayerProxyModel, - QgsMarkerSymbol, QgsMeshDatasetIndex, QgsMeshLayer, QgsProject, QgsRasterLayer, - QgsSingleSymbolRenderer, QgsTask, QgsVectorLayer, QgsVectorLayerTemporalProperties, @@ -60,9 +57,17 @@ def finished(self, result): self.parent.set_interpreter_interaction(True) if result: self.push_success_message() - self.parent.load_mesh_result(self.data["path"]) - self.parent.load_raster_result(self.data["path"]) + + # 
Load whatever data is available in the geopackage. self.parent.load_vector_result(self.data["path"]) + + if self.data["load_mesh"]: + self.parent.load_mesh_result( + self.data["path"], self.data["load_contours"] + ) + if self.data["load_raster"]: + self.parent.load_raster_result(self.data["path"]) + else: self.push_failure_message() return @@ -79,24 +84,40 @@ def __init__(self, parent=None): write_dummy_ugrid(self.dummy_ugrid_path) self.domain_button = QPushButton("Set to current extent") - self.transient_combo_box = QComboBox() - self.transient_combo_box.addItems(["Steady-state", "Transient"]) - self.transient_combo_box.currentTextChanged.connect(self.on_transient_changed) self.compute_button = QPushButton("Compute") self.compute_button.clicked.connect(self.compute) + + self.raster_checkbox = QCheckBox("Raster") + self.mesh_checkbox = QCheckBox("Mesh") + self.contours_checkbox = QCheckBox("Contours") + self.head_observations_checkbox = QCheckBox("Head Observations") + self.discharge_checkbox = QCheckBox("Discharge") + self.discharge_observations_checkbox = QCheckBox("Discharge Observations") + self.cellsize_spin_box = QDoubleSpinBox() self.cellsize_spin_box.setMinimum(0.0) self.cellsize_spin_box.setMaximum(10_000.0) self.cellsize_spin_box.setSingleStep(1.0) self.cellsize_spin_box.setValue(25.0) self.domain_button.clicked.connect(self.domain) + # By default: all output + self.raster_checkbox.setChecked(True) + self.mesh_checkbox.setChecked(True) + self.contours_checkbox.setChecked(True) + self.head_observations_checkbox.setChecked(True) + self.discharge_checkbox.setChecked(True) + + self.mesh_checkbox.toggled.connect(self.contours_checkbox.setEnabled) + self.mesh_checkbox.toggled.connect( + lambda checked: not checked and self.contours_checkbox.setChecked(False) + ) + # self.mesh_checkbox = QCheckBox("Trimesh") self.output_line_edit = QLineEdit() self.output_button = QPushButton("Set path as ...") self.output_button.clicked.connect(self.set_output_path) - 
self.contour_checkbox = QCheckBox("Auto-generate contours") - self.contour_button = QPushButton("Export contours") - self.contour_button.clicked.connect(self.export_contours) + self.contour_button = QPushButton("Redraw contours") + self.contour_button.clicked.connect(self.redraw_contours) self.contour_layer = QgsMapLayerComboBox() self.contour_layer.setFilters(QgsMapLayerProxyModel.MeshLayer) self.contour_min_box = QDoubleSpinBox() @@ -129,14 +150,21 @@ def __init__(self, parent=None): domain_layout.addWidget(self.domain_button) domain_layout.addLayout(domain_row) - result_row1 = QHBoxLayout() - result_row1.addWidget(self.transient_combo_box) - result_row1.addWidget(self.compute_button) - result_row2 = QHBoxLayout() - result_row2.addWidget(self.output_line_edit) - result_row2.addWidget(self.output_button) - result_layout.addLayout(result_row1) - result_layout.addLayout(result_row2) + output_row = QHBoxLayout() + output_row.addWidget(self.output_line_edit) + output_row.addWidget(self.output_button) + + button_row = QHBoxLayout() + button_row.addWidget(self.compute_button) + result_layout.addLayout(output_row) + + result_layout.addWidget(self.raster_checkbox) + result_layout.addWidget(self.mesh_checkbox) + result_layout.addWidget(self.contours_checkbox) + result_layout.addWidget(self.head_observations_checkbox) + result_layout.addWidget(self.discharge_checkbox) + + result_layout.addLayout(button_row) contour_row1 = QHBoxLayout() to_label = QLabel("to") @@ -151,7 +179,6 @@ def __init__(self, parent=None): contour_row2 = QHBoxLayout() contour_row2.addWidget(self.contour_layer) contour_row2.addWidget(self.contour_button) - contour_layout.addWidget(self.contour_checkbox) contour_layout.addLayout(contour_row1) contour_layout.addLayout(contour_row2) @@ -163,9 +190,13 @@ def __init__(self, parent=None): def reset(self): self.cellsize_spin_box.setValue(25.0) - self.contour_checkbox.setCheckState(False) self.transient_combo_box.setCurrentIndex(0) 
self.output_line_edit.setText("") + self.raster_checkbox.setCheckState(True) + self.mesh_checkbox.setCheckState(True) + self.contours_checkbox.setCheckState(True) + self.head_observations_checkbox.setCheckState(True) + self.discharge_checkbox.setCheckState(True) self.contour_min_box.setValue(-5.0) self.contour_max_box.setValue(5.0) self.contour_step_box.setValue(0.5) @@ -177,13 +208,6 @@ def set_interpreter_interaction(self, value: bool): def shutdown_server(self): self.parent.shutdown_server() - @property - def transient(self) -> bool: - return self.transient_combo_box.currentText() == "Transient" - - def on_transient_changed(self) -> None: - self.parent.on_transient_changed() - def contour_range(self) -> Tuple[float, float, float]: return ( float(self.contour_min_box.value()), @@ -199,8 +223,13 @@ def add_contour_layer(self, layer) -> None: self.parent.output_group.add_layer( layer, "vector", renderer=renderer, on_top=True, labels=labels ) + + @property + def output_path(self) -> str: + return self.output_line_edit.text() - def export_contours(self) -> None: + def redraw_contours(self) -> None: + path = Path(self.output_path) layer = self.contour_layer.currentLayer() renderer = layer.rendererSettings() index = renderer.activeScalarDatasetGroup() @@ -208,6 +237,7 @@ def export_contours(self) -> None: name = layer.datasetGroupMetadata(qgs_index).name() start, stop, step = self.contour_range() layer = mesh_contours( + gpkg_path=str(path.with_suffix(".output.gpkg")), layer=layer, index=index, name=name, @@ -219,10 +249,13 @@ def export_contours(self) -> None: return def set_output_path(self) -> None: - current = self.output_line_edit.text() - path, _ = QFileDialog.getSaveFileName(self, "Save output as...", current, "*") + current = self.output_path + path, _ = QFileDialog.getSaveFileName( + self, "Save output as...", current, "*.gpkg" + ) + if path != "": # Empty string in case of cancel button press - self.output_line_edit.setText(path) + 
self.output_line_edit.setText(str(Path(path).with_suffix(""))) # Note: Qt does pretty good validity checking of the Path in the # Dialog, there is no real need to validate path here. @@ -241,10 +274,18 @@ def compute(self) -> None: GeoPackage dataset. """ cellsize = self.cellsize_spin_box.value() - transient = self.transient_combo_box.currentText().lower() == "transient" - path = Path(self.output_line_edit.text()).absolute().with_suffix(".json") + transient = self.parent.dataset_widget.transient + output_options = { + "raster": self.raster_checkbox.isChecked(), + "mesh": self.mesh_checkbox.isChecked(), + "contours": self.contours_checkbox.isChecked(), + "head_observations": self.head_observations_checkbox.isChecked(), + "discharge": self.discharge_checkbox.isChecked(), + } + + path = Path(self.output_path).absolute().with_suffix(".json") invalid_input = self.parent.dataset_widget.convert_to_json( - path, cellsize=cellsize, transient=transient + path, cellsize=cellsize, transient=transient, output_options=output_options ) # Early return in case some problems are found. if invalid_input: @@ -254,6 +295,9 @@ def compute(self) -> None: "operation": "compute", "path": str(path), "transient": transient, + "load_raster": output_options["raster"], + "load_mesh": output_options["mesh"], + "load_contours": output_options["contours"], } # import json # print(json.dumps(data)) @@ -303,17 +347,13 @@ def set_cellsize_from_domain(self, ymax: float, ymin: float) -> None: dy = round(dy) self.cellsize_spin_box.setValue(dy) - def contouring(self) -> Tuple[bool, float, float, float]: - contour = self.contour_checkbox.isChecked() - start, stop, step = self.contour_range() - return contour, start, stop, step - - def load_mesh_result(self, path: Union[Path, str]) -> None: + def load_mesh_result(self, path: Union[Path, str], load_contours: bool) -> None: path = Path(path) # String for QGIS functions netcdf_path = str(path.with_suffix(".ugrid.nc")) - # Loop through layers first. 
If the path already exists as a layer source, remove it. - # Otherwise QGIS will not the load the new result (this feels like a bug?). + # Loop through layers first. If the path already exists as a layer + # source, remove it. Otherwise QGIS will not the load the new result + # (this feels like a bug?). for layer in QgsProject.instance().mapLayers().values(): if Path(netcdf_path) == Path(layer.source()): QgsProject.instance().removeMapLayer(layer.id()) @@ -322,10 +362,7 @@ def load_mesh_result(self, path: Union[Path, str]) -> None: layer = QgsMeshLayer(netcdf_path, f"{path.stem}", "mdal") indexes = layer.datasetGroupsIndexes() - - contour, start, stop, step = self.contouring() contour_layers = [] - for index in indexes: qgs_index = QgsMeshDatasetIndex(group=index, dataset=0) name = layer.datasetGroupMetadata(qgs_index).name() @@ -342,8 +379,14 @@ def load_mesh_result(self, path: Union[Path, str]) -> None: index_layer.setRendererSettings(renderer) self.parent.output_group.add_layer(index_layer, "mesh") - if contour: + if load_contours: + # Should generally result in 20 contours. + start = scalar_settings.classificationMinimum() + stop = scalar_settings.classificationMaximum() + step = (stop - start) / 21 + contour_layer = mesh_contours( + gpkg_path=str(path.with_suffix(".output.gpkg")), layer=index_layer, index=index, name=name, @@ -354,7 +397,7 @@ def load_mesh_result(self, path: Union[Path, str]) -> None: contour_layers.append(contour_layer) # Add the contours in the appropriate order: highest (deepest) layer first! 
- if contour: + if load_contours: for contour_layer in contour_layers[::-1]: self.add_contour_layer(contour_layer) @@ -444,7 +487,10 @@ def load_vector_result(self, path: Union[Path, str]) -> None: temporal_properties.setIsActive(False) if add: - if "timml Observation:" in layername or "ttim Observation" in layername: + if ( + "timml Head Observation:" in layername + or "ttim Head Observation" in layername + ): labels = layer_styling.number_labels("head_layer0") elif "discharge-" in layername: labels = layer_styling.number_labels("discharge_layer0") diff --git a/plugin/qgistim/widgets/dataset_widget.py b/plugin/qgistim/widgets/dataset_widget.py index 27a2b9b..616f913 100644 --- a/plugin/qgistim/widgets/dataset_widget.py +++ b/plugin/qgistim/widgets/dataset_widget.py @@ -13,12 +13,13 @@ import json from pathlib import Path from shutil import copy -from typing import List, Set +from typing import Dict, List, Set from PyQt5.QtCore import Qt from PyQt5.QtWidgets import ( QAbstractItemView, QCheckBox, + QComboBox, QFileDialog, QHBoxLayout, QHeaderView, @@ -49,7 +50,7 @@ "Circular Area Sink", "Impermeable Line Doublet", "Leaky Line Doublet", - "Observation", + "Head Observation", ] ) @@ -245,7 +246,10 @@ def __init__(self, parent): self.new_geopackage_button = QPushButton("New") self.open_geopackage_button = QPushButton("Open") self.copy_geopackage_button = QPushButton("Copy") - self.remove_button = QPushButton("Remove from Dataset") + self.transient_combo_box = QComboBox() + self.transient_combo_box.addItems(["Steady-state", "Transient"]) + self.transient_combo_box.currentTextChanged.connect(self.on_transient_changed) + self.remove_button = QPushButton("Remove from GeoPackage") self.add_button = QPushButton("Add to QGIS") self.new_geopackage_button.clicked.connect(self.new_geopackage) self.open_geopackage_button.clicked.connect(self.open_geopackage) @@ -254,10 +258,11 @@ def __init__(self, parent): 
self.suppress_popup_checkbox.stateChanged.connect(self.suppress_popup_changed) self.remove_button.clicked.connect(self.remove_geopackage_layer) self.add_button.clicked.connect(self.add_selection_to_qgis) - self.convert_button = QPushButton("Convert GeoPackage to Python script") + self.convert_button = QPushButton("Export to Python script") self.convert_button.clicked.connect(self.convert_to_python) # Layout dataset_layout = QVBoxLayout() + mode_row = QHBoxLayout() dataset_row = QHBoxLayout() layer_row = QHBoxLayout() dataset_row.addWidget(self.dataset_line_edit) @@ -265,6 +270,8 @@ def __init__(self, parent): dataset_row.addWidget(self.new_geopackage_button) dataset_row.addWidget(self.copy_geopackage_button) dataset_layout.addLayout(dataset_row) + mode_row.addWidget(self.transient_combo_box) + dataset_layout.addLayout(mode_row) dataset_layout.addWidget(self.dataset_tree) dataset_layout.addWidget(self.suppress_popup_checkbox) layer_row.addWidget(self.add_button) @@ -328,13 +335,14 @@ def load_geopackage(self) -> None: for element in elements: self.dataset_tree.add_element(element) + transient = self.transient for item in self.dataset_tree.items(): self.add_item_to_qgis(item) - item.element.on_transient_changed(self.parent.transient) + item.element.on_transient_changed(transient) self.dataset_tree.sortByColumn(0, Qt.SortOrder.AscendingOrder) self.parent.toggle_element_buttons(True) - self.parent.on_transient_changed() + self.on_transient_changed() return def new_geopackage(self) -> None: @@ -400,6 +408,14 @@ def remove_geopackage_layer(self) -> None: self.dataset_tree.remove_geopackage_layers() return + @property + def transient(self) -> bool: + return self.transient_combo_box.currentText() == "Transient" + + def on_transient_changed(self) -> None: + self.dataset_tree.on_transient_changed(self.transient) + return + def suppress_popup_changed(self): suppress = self.suppress_popup_checkbox.isChecked() for item in self.dataset_tree.items(): @@ -434,10 +450,6 @@ def 
selection_names(self) -> Set[str]: selection.append(item.assoc_item) return set([item.element.name for item in selection]) - def on_transient_changed(self, transient: bool) -> None: - self.dataset_tree.on_transient_changed(transient) - return - def add_element(self, element) -> None: self.dataset_tree.add_element(element) return @@ -477,7 +489,13 @@ def convert_to_python(self, transient: bool) -> None: ) return - def convert_to_json(self, path: str, cellsize: float, transient: bool) -> bool: + def convert_to_json( + self, + path: str, + cellsize: float, + transient: bool, + output_options: Dict[str, bool], + ) -> bool: """ Parameters ---------- @@ -497,7 +515,7 @@ def convert_to_json(self, path: str, cellsize: float, transient: bool) -> bool: if data is None: return True - json_data = to_json(data, cellsize=cellsize) + json_data = to_json(data, cellsize=cellsize, output_options=output_options) crs = self.parent.crs organization, srs_id = crs.authid().split(":") json_data["crs"] = { diff --git a/plugin/qgistim/widgets/tim_widget.py b/plugin/qgistim/widgets/tim_widget.py index 7d7ccc4..a26c829 100644 --- a/plugin/qgistim/widgets/tim_widget.py +++ b/plugin/qgistim/widgets/tim_widget.py @@ -37,7 +37,7 @@ def __init__( self.root = root self.subgroups = {} self.create_group() - + def create_group(self): self.group = self.root.addGroup(self.name) return @@ -184,9 +184,9 @@ def __init__(self, parent, iface): self.layout = QVBoxLayout() self.tabwidget = QTabWidget() self.layout.addWidget(self.tabwidget) - self.tabwidget.addTab(self.dataset_widget, "GeoPackage") + self.tabwidget.addTab(self.dataset_widget, "Model Manager") self.tabwidget.addTab(self.elements_widget, "Elements") - self.tabwidget.addTab(self.compute_widget, "Compute") + self.tabwidget.addTab(self.compute_widget, "Results") self.tabwidget.addTab(self.extraction_widget, "Extract") self.layout.addWidget(self.config_button, stretch=0, alignment=Qt.AlignRight) self.setLayout(self.layout) @@ -244,11 +244,6 @@ def 
shutdown_server(self) -> None: self.server_handler.kill() return - def on_transient_changed(self) -> None: - transient = self.compute_widget.transient - self.dataset_widget.on_transient_changed(transient) - return - @property def path(self) -> str: return self.dataset_widget.path @@ -303,19 +298,15 @@ def add_element(self, element: Any): # QGIS layers # ----------- - def create_input_group(self, name: str): + def create_input_group(self, name: str) -> None: root = QgsProject.instance().layerTreeRoot() self.input_group = LayersPanelGroup(root, f"{name} input") self.input_group.create_subgroup("timml") self.input_group.create_subgroup("ttim") return - def create_output_group(self, name: str): + def create_output_group(self, name: str) -> None: root = QgsProject.instance().layerTreeRoot() self.output_group = LayersPanelGroup(root, f"{name} output") - # Pre-create the groups here to make sure the vector group ends up on top. - # Apparently moving it destroys the group? - self.output_group.create_subgroup("vector") - self.output_group.create_subgroup("mesh") - self.output_group.create_subgroup("raster") return + \ No newline at end of file From 63c1aa1d3e3c37b35bd3f35a3974accb84de5006 Mon Sep 17 00:00:00 2001 From: Huite Bootsma Date: Tue, 24 Oct 2023 18:35:13 +0200 Subject: [PATCH 18/41] Create subgroups to get order right --- plugin/qgistim/widgets/tim_widget.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/plugin/qgistim/widgets/tim_widget.py b/plugin/qgistim/widgets/tim_widget.py index a26c829..e7d3e77 100644 --- a/plugin/qgistim/widgets/tim_widget.py +++ b/plugin/qgistim/widgets/tim_widget.py @@ -308,5 +308,10 @@ def create_input_group(self, name: str) -> None: def create_output_group(self, name: str) -> None: root = QgsProject.instance().layerTreeRoot() self.output_group = LayersPanelGroup(root, f"{name} output") + # Pre-create the groups here to make sure the vector group ends up on top. + # Apparently moving it destroys the group? 
+ self.output_group.create_subgroup("vector") + self.output_group.create_subgroup("mesh") + self.output_group.create_subgroup("raster") return \ No newline at end of file From ac917035467b7280fd53e98f4a98dbe6ba28a3be Mon Sep 17 00:00:00 2001 From: Huite Bootsma Date: Tue, 24 Oct 2023 18:36:43 +0200 Subject: [PATCH 19/41] Add method to clear outdated output, change output checkbox defaults --- gistim/geopackage.py | 18 +++ plugin/qgistim/core/processing.py | 10 +- plugin/qgistim/widgets/compute_widget.py | 139 +++++++++++------------ 3 files changed, 92 insertions(+), 75 deletions(-) diff --git a/gistim/geopackage.py b/gistim/geopackage.py index 163bac6..041598b 100644 --- a/gistim/geopackage.py +++ b/gistim/geopackage.py @@ -122,6 +122,24 @@ def process_table(dataframe: pd.DataFrame) -> Tuple[pd.DataFrame, BoundingBox, s def write_geopackage( tables: Dict[str, pd.DataFrame], crs: CoordinateReferenceSystem, path: Path ) -> None: + """ + Write the content of the tables to a geopackage. Overwrites the geopackage + if it already exists. + + Parameters + ---------- + tables: Dict[str, pd.DataFrame] + Mapping of table name to contents, as a dataframe. + May be an empty dictionary. + crs: CoordinateReferenceSystem + Coordinate reference system of the geometries. + path: Path + Path to the geopackage to write. + + Returns + ------- + None + """ # We write all tables to a temporary GeoPackage with a dot prefix, and at # the end move this over the target file. This does not throw a # PermissionError if the file is open in QGIS. diff --git a/plugin/qgistim/core/processing.py b/plugin/qgistim/core/processing.py index 59edffa..fbfeba9 100644 --- a/plugin/qgistim/core/processing.py +++ b/plugin/qgistim/core/processing.py @@ -5,6 +5,7 @@ into line contours. 
""" import datetime +from pathlib import Path from typing import NamedTuple import numpy as np @@ -95,7 +96,7 @@ def steady_contours( feature_data.append(SteadyContourData(geom, value)) # Setup output layer - contour_layer = QgsVectorLayer("Linestring", f"contours-{name}", "memory") + contour_layer = QgsVectorLayer("Linestring", name, "memory") contour_layer.setCrs(layer.crs()) provider = contour_layer.dataProvider() provider.addAttributes( @@ -132,7 +133,7 @@ def transient_contours( contourer = QgsMeshContours(layer) # Setup output layer - contour_layer = QgsVectorLayer("Linestring", f"contours-{name}", "memory") + contour_layer = QgsVectorLayer("Linestring", name, "memory") contour_layer.setCrs(layer.crs()) provider = contour_layer.dataProvider() provider.addAttributes( @@ -214,10 +215,11 @@ def mesh_contours( else: vector_layer = steady_contours(layer, index, name, start, stop, step) + newfile = not Path(gpkg_path).exists() written_layer = geopackage.write_layer( path=gpkg_path, layer=vector_layer, - layername=vector_layer.name(), - newfile=False, + layername=name, + newfile=newfile, ) return written_layer diff --git a/plugin/qgistim/widgets/compute_widget.py b/plugin/qgistim/widgets/compute_widget.py index b038972..78cebe0 100644 --- a/plugin/qgistim/widgets/compute_widget.py +++ b/plugin/qgistim/widgets/compute_widget.py @@ -58,14 +58,16 @@ def finished(self, result): if result: self.push_success_message() + self.parent.clear_outdated_output(self.data["path"]) # Load whatever data is available in the geopackage. 
- self.parent.load_vector_result(self.data["path"]) + if self.data["head_observations"] or self.data["discharge"]: + self.parent.load_vector_result(self.data["path"]) - if self.data["load_mesh"]: + if self.data["mesh"]: self.parent.load_mesh_result( - self.data["path"], self.data["load_contours"] + self.data["path"], self.data["contours"] ) - if self.data["load_raster"]: + if self.data["raster"]: self.parent.load_raster_result(self.data["path"]) else: @@ -87,8 +89,8 @@ def __init__(self, parent=None): self.compute_button = QPushButton("Compute") self.compute_button.clicked.connect(self.compute) - self.raster_checkbox = QCheckBox("Raster") self.mesh_checkbox = QCheckBox("Mesh") + self.raster_checkbox = QCheckBox("Raster") self.contours_checkbox = QCheckBox("Contours") self.head_observations_checkbox = QCheckBox("Head Observations") self.discharge_checkbox = QCheckBox("Discharge") @@ -101,11 +103,11 @@ def __init__(self, parent=None): self.cellsize_spin_box.setValue(25.0) self.domain_button.clicked.connect(self.domain) # By default: all output - self.raster_checkbox.setChecked(True) self.mesh_checkbox.setChecked(True) - self.contours_checkbox.setChecked(True) + self.raster_checkbox.setChecked(False) + self.contours_checkbox.setChecked(False) self.head_observations_checkbox.setChecked(True) - self.discharge_checkbox.setChecked(True) + self.discharge_checkbox.setChecked(False) self.mesh_checkbox.toggled.connect(self.contours_checkbox.setEnabled) self.mesh_checkbox.toggled.connect( @@ -158,8 +160,8 @@ def __init__(self, parent=None): button_row.addWidget(self.compute_button) result_layout.addLayout(output_row) - result_layout.addWidget(self.raster_checkbox) result_layout.addWidget(self.mesh_checkbox) + result_layout.addWidget(self.raster_checkbox) result_layout.addWidget(self.contours_checkbox) result_layout.addWidget(self.head_observations_checkbox) result_layout.addWidget(self.discharge_checkbox) @@ -192,11 +194,11 @@ def reset(self): 
self.cellsize_spin_box.setValue(25.0) self.transient_combo_box.setCurrentIndex(0) self.output_line_edit.setText("") - self.raster_checkbox.setCheckState(True) + self.raster_checkbox.setCheckState(False) self.mesh_checkbox.setCheckState(True) - self.contours_checkbox.setCheckState(True) + self.contours_checkbox.setCheckState(False) self.head_observations_checkbox.setCheckState(True) - self.discharge_checkbox.setCheckState(True) + self.discharge_checkbox.setCheckState(False) self.contour_min_box.setValue(-5.0) self.contour_max_box.setValue(5.0) self.contour_step_box.setValue(0.5) @@ -223,11 +225,24 @@ def add_contour_layer(self, layer) -> None: self.parent.output_group.add_layer( layer, "vector", renderer=renderer, on_top=True, labels=labels ) - + @property def output_path(self) -> str: return self.output_line_edit.text() + def clear_outdated_output(self, path: str) -> None: + path = Path(path) + gpkg_path = path.with_suffix(".output.gpkg") + netcdf_paths = (path.with_suffix(".nc"), path.with_suffix(".ugrid.nc")) + for layer in QgsProject.instance().mapLayers().values(): + source = layer.source() + if ( + Path(source) in netcdf_paths + or Path(source.partition("|")[0]) == gpkg_path + ): + QgsProject.instance().removeMapLayer(layer.id()) + return + def redraw_contours(self) -> None: path = Path(self.output_path) layer = self.contour_layer.currentLayer() @@ -235,17 +250,33 @@ def redraw_contours(self) -> None: index = renderer.activeScalarDatasetGroup() qgs_index = QgsMeshDatasetIndex(group=index, dataset=0) name = layer.datasetGroupMetadata(qgs_index).name() + contours_name = f"{path.stem}-contours-{name}" start, stop, step = self.contour_range() + gpkg_path = str(path.with_suffix(".output.gpkg")) + layer = mesh_contours( - gpkg_path=str(path.with_suffix(".output.gpkg")), + gpkg_path=gpkg_path, layer=layer, index=index, - name=name, + name=contours_name, start=start, stop=stop, step=step, ) - self.add_contour_layer(layer) + + # Re-use layer if it already exists. 
Otherwise add a new layer. + project_layers = { + layer.name(): layer for layer in QgsProject.instance().mapLayers().values() + } + project_layer = project_layers.get(contours_name) + if ( + (project_layer is not None) + and (project_layer.name() == layer.name()) + and (project_layer.source() == layer.source()) + ): + project_layer.reload() + else: + self.add_contour_layer(layer) return def set_output_path(self) -> None: @@ -295,13 +326,8 @@ def compute(self) -> None: "operation": "compute", "path": str(path), "transient": transient, - "load_raster": output_options["raster"], - "load_mesh": output_options["mesh"], - "load_contours": output_options["contours"], + **output_options, } - # import json - # print(json.dumps(data)) - # # https://gis.stackexchange.com/questions/296175/issues-with-qgstask-and-task-manager # It seems the task goes awry when not associated with a Python object! # -- we just assign it to the widget here. @@ -351,15 +377,6 @@ def load_mesh_result(self, path: Union[Path, str], load_contours: bool) -> None: path = Path(path) # String for QGIS functions netcdf_path = str(path.with_suffix(".ugrid.nc")) - # Loop through layers first. If the path already exists as a layer - # source, remove it. Otherwise QGIS will not the load the new result - # (this feels like a bug?). 
- for layer in QgsProject.instance().mapLayers().values(): - if Path(netcdf_path) == Path(layer.source()): - QgsProject.instance().removeMapLayer(layer.id()) - # Ensure the file is properly released by loading a dummy - QgsMeshLayer(str(self.dummy_ugrid_path), "", "mdal") - layer = QgsMeshLayer(netcdf_path, f"{path.stem}", "mdal") indexes = layer.datasetGroupsIndexes() contour_layers = [] @@ -389,7 +406,7 @@ def load_mesh_result(self, path: Union[Path, str], load_contours: bool) -> None: gpkg_path=str(path.with_suffix(".output.gpkg")), layer=index_layer, index=index, - name=name, + name=f"{path.stem}-contours-{name}", start=start, stop=stop, step=step, @@ -414,12 +431,6 @@ def steady_or_first(name: str) -> bool: # String for QGIS functions path = Path(path) raster_path = str(path.with_suffix(".nc")) - for layer in QgsProject.instance().mapLayers().values(): - if Path(raster_path) == Path(layer.source()): - QgsProject.instance().removeMapLayer(layer.id()) - - # contour, start, stop, step = self.contouring() - layer = QgsRasterLayer(raster_path, "", "gdal") bands = [i + 1 for i in range(layer.bandCount())] @@ -448,30 +459,17 @@ def steady_or_first(name: str) -> bool: def load_vector_result(self, path: Union[Path, str]) -> None: path = Path(path) - project_layers = { - layer.name(): layer for layer in QgsProject.instance().mapLayers().values() - } gpkg_path = path.with_suffix(".output.gpkg") if not gpkg_path.exists(): return for layername in geopackage.layers(str(gpkg_path)): - add = False layers_panel_name = f"{path.stem}-{layername}" - project_layer = project_layers.get(layers_panel_name) - if ( - project_layer is not None - and Path(project_layer.source().partition("|")[0]) == gpkg_path - ): - # Shares name and source. Just reload the layer. 
- layer = project_layer - layer.reload() - else: - layer = QgsVectorLayer( - f"{gpkg_path}|layername={layername}", layers_panel_name - ) - add = True + + layer = QgsVectorLayer( + f"{gpkg_path}|layername={layername}", layers_panel_name + ) # Set the temporal properties if it's a temporal layer temporal_properties = layer.temporalProperties() @@ -486,21 +484,20 @@ def load_vector_result(self, path: Union[Path, str]) -> None: else: temporal_properties.setIsActive(False) - if add: - if ( - "timml Head Observation:" in layername - or "ttim Head Observation" in layername - ): - labels = layer_styling.number_labels("head_layer0") - elif "discharge-" in layername: - labels = layer_styling.number_labels("discharge_layer0") - else: - labels = None - - _, element_type, _ = parse_name(layername) - renderer = ELEMENTS[element_type].renderer() - self.parent.output_group.add_layer( - layer, "vector", renderer=renderer, labels=labels - ) + if ( + "timml Head Observation:" in layername + or "ttim Head Observation" in layername + ): + labels = layer_styling.number_labels("head_layer0") + elif "discharge-" in layername: + labels = layer_styling.number_labels("discharge_layer0") + else: + labels = None + + _, element_type, _ = parse_name(layername) + renderer = ELEMENTS[element_type].renderer() + self.parent.output_group.add_layer( + layer, "vector", renderer=renderer, labels=labels + ) return From 4aa6e80ab8fa590c26366d8fa329c6c774740c85 Mon Sep 17 00:00:00 2001 From: Huite Bootsma Date: Tue, 24 Oct 2023 18:36:56 +0200 Subject: [PATCH 20/41] Remove unused function --- gistim/compute.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/gistim/compute.py b/gistim/compute.py index 1c4664f..9712b42 100644 --- a/gistim/compute.py +++ b/gistim/compute.py @@ -112,12 +112,6 @@ def timml_head_observations( return pd.DataFrame(d) -def timml_discharges(elements: Dict[str, Any]) -> Dict[str, pd.DataFrame]: - discharges = {} - for layername, content in elements.items(): - first_element = 
content[0] - - def timml_headgrid(model: timml.Model, xmin, xmax, ymin, ymax, spacing) -> xr.DataArray: """ Compute the headgrid of the TimML model, and store the results From d1f3f7203d01cb8b98be3224e81be03233814bc2 Mon Sep 17 00:00:00 2001 From: Huite Bootsma Date: Tue, 24 Oct 2023 18:43:24 +0200 Subject: [PATCH 21/41] Use enum for popup suppression. Required for QGIS >=3.32 --- plugin/qgistim/widgets/tim_widget.py | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/plugin/qgistim/widgets/tim_widget.py b/plugin/qgistim/widgets/tim_widget.py index e7d3e77..ca80967 100644 --- a/plugin/qgistim/widgets/tim_widget.py +++ b/plugin/qgistim/widgets/tim_widget.py @@ -15,7 +15,14 @@ QVBoxLayout, QWidget, ) -from qgis.core import Qgis, QgsApplication, QgsMapLayer, QgsProject, QgsUnitTypes +from qgis.core import ( + Qgis, + QgsApplication, + QgsEditFormConfig, + QgsMapLayer, + QgsProject, + QgsUnitTypes, +) from qgistim.core.server_handler import ServerHandler from qgistim.core.task import BaseServerTask from qgistim.widgets.compute_widget import ComputeWidget @@ -37,7 +44,7 @@ def __init__( self.root = root self.subgroups = {} self.create_group() - + def create_group(self): self.group = self.root.addGroup(self.name) return @@ -132,8 +139,11 @@ def add_layer( maplayer = QgsProject.instance().addMapLayer(layer, False) if suppress is not None: config = maplayer.editFormConfig() - config.setSuppress(suppress) - maplayer.setEditFormConfig(config) + config.setSuppress( + QgsEditFormConfig.SuppressOn + if suppress + else QgsEditFormConfig.SuppressDefault + ) if renderer is not None: maplayer.setRenderer(renderer) if labels is not None: @@ -314,4 +324,3 @@ def create_output_group(self, name: str) -> None: self.output_group.create_subgroup("mesh") self.output_group.create_subgroup("raster") return - \ No newline at end of file From d4e27b1ee89720da4c8a45d6e9b1af3f222a4ab8 Mon Sep 17 00:00:00 2001 From: Huite Bootsma Date: Tue, 24 Oct 2023 19:01:51 
+0200 Subject: [PATCH 22/41] Option Dialog -> Version dialog. Do nothing if no mesh layer is available in redraw contours. Fixes #130 --- plugin/qgistim/widgets/compute_widget.py | 2 + plugin/qgistim/widgets/options_dialog.py | 130 ---------------------- plugin/qgistim/widgets/settings_widget.py | 121 -------------------- plugin/qgistim/widgets/tim_widget.py | 10 +- plugin/qgistim/widgets/version_dialog.py | 57 ++++++++++ 5 files changed, 64 insertions(+), 256 deletions(-) delete mode 100644 plugin/qgistim/widgets/options_dialog.py delete mode 100644 plugin/qgistim/widgets/settings_widget.py create mode 100644 plugin/qgistim/widgets/version_dialog.py diff --git a/plugin/qgistim/widgets/compute_widget.py b/plugin/qgistim/widgets/compute_widget.py index 78cebe0..b73d838 100644 --- a/plugin/qgistim/widgets/compute_widget.py +++ b/plugin/qgistim/widgets/compute_widget.py @@ -246,6 +246,8 @@ def clear_outdated_output(self, path: str) -> None: def redraw_contours(self) -> None: path = Path(self.output_path) layer = self.contour_layer.currentLayer() + if layer is None: + return renderer = layer.rendererSettings() index = renderer.activeScalarDatasetGroup() qgs_index = QgsMeshDatasetIndex(group=index, dataset=0) diff --git a/plugin/qgistim/widgets/options_dialog.py b/plugin/qgistim/widgets/options_dialog.py deleted file mode 100644 index 0bfb5ce..0000000 --- a/plugin/qgistim/widgets/options_dialog.py +++ /dev/null @@ -1,130 +0,0 @@ -import subprocess -import sys -from functools import partial - -from PyQt5.QtCore import Qt -from PyQt5.QtWidgets import ( - QComboBox, - QDialog, - QGridLayout, - QLabel, - QLineEdit, - QMessageBox, - QPushButton, - QVBoxLayout, -) -from qgistim.core.server_handler import ServerHandler - -INSTALL_COMMANDS = { - "Git": "{interpreter} -m pip install git+{repo_url} {package}", - "conda-forge": "mamba install -c conda-forge {package}", -} -INSTALL_VERSION_COMMANDS = { - "Git": INSTALL_COMMANDS["Git"] + "@{version}", - "conda-forge": 
INSTALL_COMMANDS["conda-forge"] + "={version}", -} - -REPOSITORY_URLS = { - "timml": "https://github.com/mbakker7/timml.git", - "ttim": "https://github.com/mbakker7/ttim.git", - "gistim": "https://github.com/Deltares/QGIS-Tim.git", -} - - -class NotSupportedDialog(QMessageBox): - def __init__(self, parent=None, command: str = ""): - QMessageBox.__init__(self, parent) - self.setWindowTitle("Install unsupported") - self.setIcon(QMessageBox.Information) - self.setText( - "Installing new versions via this menu is not (yet) supported for " - "this operating system. Please run the following command in the " - f"appropriate command line:\n\n{command}" - ) - return - - -class OptionsDialog(QDialog): - def __init__(self, parent=None): - QDialog.__init__(self, parent) - self.setWindowTitle("QGIS-Tim Options") - - self.install_task = None - self.close_button = QPushButton("Close") - self.close_button.clicked.connect(self.reject) - - self.interpreter_combo_box = QComboBox() - self.interpreter_combo_box.insertItems(0, ServerHandler.interpreters()) - self.interpreter_combo_box.currentIndexChanged.connect( - self.on_interpreter_changed - ) - - layout = QVBoxLayout() - version_layout = QGridLayout() - self.version_widgets = {} - for i, package in enumerate(["timml", "ttim", "gistim"]): - version_line_edit = QLineEdit() - version_line_edit.setText("Latest"), - version_view = QLineEdit() - version_view.setEnabled(False) - origin_combo_box = QComboBox() - install_button = QPushButton("Install") - install_button.clicked.connect(partial(self.install, package=package)) - origin_combo_box.insertItems(0, ["conda-forge", "Git"]) - widgets = ( - QLabel(package), - version_view, - origin_combo_box, - version_line_edit, - install_button, - ) - for j, widget in enumerate(widgets): - version_layout.addWidget(widget, i, j) - self.version_widgets[package] = widgets - - layout.addWidget(self.interpreter_combo_box) - layout.addLayout(version_layout) - layout.addWidget(self.close_button, stretch=0, 
alignment=Qt.AlignRight) - layout.addStretch() - self.setLayout(layout) - self.on_interpreter_changed() - - def install(self, package: str): - interpreter = self.interpreter_combo_box.currentText() - _, _, origin_combo_box, version_line_edit, _ = self.version_widgets[package] - origin = origin_combo_box.currentText() - version = version_line_edit.text() - - lowered_version = version.lower().strip() - url = REPOSITORY_URLS[package] - if lowered_version in ("latest", ""): - command = INSTALL_COMMANDS[origin] - else: - command = INSTALL_VERSION_COMMANDS[origin] - - command = command.format( - interpreter=interpreter, - package=package, - version=version, - repo_url=url, - ) - env_vars = ServerHandler.environmental_variables()[interpreter] - - if sys.platform == "win32": - subprocess.Popen( - f"cmd.exe /k {command}", - creationflags=subprocess.CREATE_NEW_CONSOLE, - env=env_vars, - text=True, - ) - else: - NotSupportedDialog(self, command).show() - - return - - def on_interpreter_changed(self): - versions = ServerHandler.versions() - interpreter = self.interpreter_combo_box.currentText() - for package in ["timml", "ttim", "gistim"]: - self.version_widgets[package][1].setText(versions[interpreter][package]) - return diff --git a/plugin/qgistim/widgets/settings_widget.py b/plugin/qgistim/widgets/settings_widget.py deleted file mode 100644 index 760f674..0000000 --- a/plugin/qgistim/widgets/settings_widget.py +++ /dev/null @@ -1,121 +0,0 @@ -import subprocess -from functools import partial - -from PyQt5.QtCore import Qt -from PyQt5.QtWidgets import ( - QComboBox, - QDialog, - QGridLayout, - QLabel, - QLineEdit, - QPushButton, - QVBoxLayout, -) -from qgis.core import QgsApplication -from qgistim.core.server_handler import ServerHandler -from qgistim.core.task import BaseServerTask - -INSTALL_COMMANDS = { - "Git": "{interpreter} -m pip install git+{repo_url} {package}", - "conda-forge": "mamba install -c conda-forge {package}", -} -INSTALL_VERSION_COMMANDS = { - "Git": 
INSTALL_COMMANDS["Git"] + "@{version}", - "conda-forge": INSTALL_COMMANDS["conda-forge"] + "={version}", -} - - -class InstallTask(BaseServerTask): - @property - def task_description(self): - return "Updating package" - - def run(self): - try: - self.response = subprocess.run( - self.data["command"], - check=True, - creationflags=subprocess.CREATE_NEW_CONSOLE, - env=self.data["env_vars"], - ) - return True - except Exception as exception: - self.exception = exception - return False - - -class OptionsDialog(QDialog): - def __init__(self, parent=None) -> None: - QDialog.__init__(self, parent) - self.setWindowTitle("QGIS-Tim Options") - - self.install_task = None - self.close_button = QPushButton("Close") - self.interpreter_combo_box = QComboBox() - self.interpreter_combo_box.insertItems(0, ServerHandler.interpreters()) - self.interpreter_combo_box.currentIndexChanged.connect( - self.on_interpreter_changed - ) - - layout = QVBoxLayout() - version_layout = QGridLayout() - self.version_widgets = {} - - for i, package in enumerate(["timml", "ttim", "gistim"]): - version_line_edit = QLineEdit() - version_line_edit.setText("Latest"), - version_view = QLineEdit() - version_view.setEnabled(False) - origin_combo_box = QComboBox() - install_button = QPushButton("Install") - install_button.clicked.connect(partial(self.install, package=package)) - origin_combo_box.insertItems(0, ["conda-forge", "Git"]) - widgets = ( - QLabel(package), - version_view, - origin_combo_box, - version_line_edit, - install_button, - ) - for j, widget in enumerate(widgets): - version_layout.addWidget(widget, i, j) - - self.version_widgets[package] = widgets - - layout.addWidget(self.interpreter_combo_box) - layout.addLayout(version_layout) - layout.addWidget(self.close_button, stretch=0, alignment=Qt.AlignRight) - layout.addStretch() - self.setLayout(layout) - self.on_interpreter_changed() - - def install(self, package: str): - interpreter = self.interpreter_combo_box.currentText() - _, 
origin_combo_box, version_line_edit, _ = self.version_widgets[package] - origin = origin_combo_box.currentText() - version = version_line_edit.text() - - lowered_version = version.lower().strip() - if lowered_version in ("latest", ""): - command = INSTALL_COMMANDS[origin].format( - interpreter=interpreter, package=package - ) - else: - command = INSTALL_VERSION_COMMANDS[origin].format( - interpreter=interpreter, package=package, version=version - ) - - env_vars = ServerHandler.environmental_variables[interpreter] - self.install_task = InstallTask( - self, data={"command": command, "env_vars": env_vars} - ) - self.parent.set_interpreter_interaction(False) - QgsApplication.taskManager().addTask(self.install_task) - return - - def on_interpreter_changed(self): - versions = ServerHandler.versions() - interpreter = self.interpreter_combo_box.currentText() - for package in ["timml", "ttim", "gistim"]: - self.version_widgets[package][1].setText(versions[interpreter][package]) - return diff --git a/plugin/qgistim/widgets/tim_widget.py b/plugin/qgistim/widgets/tim_widget.py index ca80967..3622196 100644 --- a/plugin/qgistim/widgets/tim_widget.py +++ b/plugin/qgistim/widgets/tim_widget.py @@ -29,7 +29,7 @@ from qgistim.widgets.dataset_widget import DatasetWidget from qgistim.widgets.elements_widget import ElementsWidget from qgistim.widgets.extraction_widget import DataExtractionWidget -from qgistim.widgets.options_dialog import OptionsDialog +from qgistim.widgets.version_dialog import VersionDialog PYQT_DELETED_ERROR = "wrapped C/C++ object of type QgsLayerTreeGroup has been deleted" @@ -184,10 +184,10 @@ def __init__(self, parent, iface): self.dataset_widget = DatasetWidget(self) self.elements_widget = ElementsWidget(self) self.compute_widget = ComputeWidget(self) - self.options_dialog = OptionsDialog(self) + self.version_dialog = VersionDialog(self) - self.config_button = QPushButton("Options") - self.config_button.clicked.connect(self.options_dialog.show) + 
self.config_button = QPushButton("Versions") + self.config_button.clicked.connect(self.version_dialog.show) self.config_button.setIcon(QgsApplication.getThemeIcon("/mActionOptions.svg")) # Layout @@ -230,7 +230,7 @@ def reset(self): def start_interpreter_task(self) -> Union[StartTask, None]: if not self.server_handler.alive(): - interpreter = self.options_dialog.interpreter_combo_box.currentText() + interpreter = self.version_dialog.interpreter_combo_box.currentText() start_task = StartTask(self, {"interpreter": interpreter}, self.message_bar) return start_task else: diff --git a/plugin/qgistim/widgets/version_dialog.py b/plugin/qgistim/widgets/version_dialog.py new file mode 100644 index 0000000..c62699e --- /dev/null +++ b/plugin/qgistim/widgets/version_dialog.py @@ -0,0 +1,57 @@ +from PyQt5.QtCore import Qt +from PyQt5.QtWidgets import ( + QComboBox, + QDialog, + QGridLayout, + QLabel, + QLineEdit, + QPushButton, + QVBoxLayout, +) +from qgistim.core.server_handler import ServerHandler + + +class VersionDialog(QDialog): + def __init__(self, parent=None): + QDialog.__init__(self, parent) + self.setWindowTitle("QGIS-Tim Version Info") + + self.install_task = None + self.close_button = QPushButton("Close") + self.close_button.clicked.connect(self.reject) + + self.interpreter_combo_box = QComboBox() + self.interpreter_combo_box.insertItems(0, ServerHandler.interpreters()) + self.interpreter_combo_box.currentIndexChanged.connect( + self.on_interpreter_changed + ) + + layout = QVBoxLayout() + version_layout = QGridLayout() + self.version_widgets = {} + for i, package in enumerate(["timml", "ttim", "gistim"]): + version_line_edit = QLineEdit() + version_line_edit.setText("Latest"), + version_view = QLineEdit() + version_view.setEnabled(False) + widgets = ( + QLabel(package), + version_view, + ) + for j, widget in enumerate(widgets): + version_layout.addWidget(widget, i, j) + self.version_widgets[package] = widgets + + layout.addWidget(self.interpreter_combo_box) + 
layout.addLayout(version_layout) + layout.addWidget(self.close_button, stretch=0, alignment=Qt.AlignRight) + layout.addStretch() + self.setLayout(layout) + self.on_interpreter_changed() + + def on_interpreter_changed(self): + versions = ServerHandler.versions() + interpreter = self.interpreter_combo_box.currentText() + for package in ["timml", "ttim", "gistim"]: + self.version_widgets[package][1].setText(versions[interpreter][package]) + return From 0cadfe0c406ebeb7bcc9b156ddf3d2eb39321f65 Mon Sep 17 00:00:00 2001 From: Huite Bootsma Date: Tue, 24 Oct 2023 19:12:14 +0200 Subject: [PATCH 23/41] Round contour values to 3 digits. Fixes #113 --- plugin/qgistim/core/processing.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/plugin/qgistim/core/processing.py b/plugin/qgistim/core/processing.py index fbfeba9..502b4f8 100644 --- a/plugin/qgistim/core/processing.py +++ b/plugin/qgistim/core/processing.py @@ -112,7 +112,7 @@ def steady_contours( f.setGeometry(item.geometry) # Make sure to convert to the appropriate Qt types # e.g. no numpy floats allowed - f.setAttributes([float(item.head)]) + f.setAttributes([round(float(item.head), 3)]) provider.addFeature(f) contour_layer.updateExtents() @@ -181,7 +181,7 @@ def transient_contours( # e.g. 
no numpy floats allowed f.setAttributes( [ - float(item.head), + float(round(item.head, 3)), QDateTime(item.datetime), QDateTime(end_dates[item.datetime]), ] @@ -214,7 +214,7 @@ def mesh_contours( vector_layer = transient_contours(layer, index, name, start, stop, step) else: vector_layer = steady_contours(layer, index, name, start, stop, step) - + newfile = not Path(gpkg_path).exists() written_layer = geopackage.write_layer( path=gpkg_path, From a33cb84b4f415f86b74c89569377ed48b3839cdc Mon Sep 17 00:00:00 2001 From: Huite Bootsma Date: Tue, 24 Oct 2023 19:43:56 +0200 Subject: [PATCH 24/41] Fix #22 --- plugin/qgistim/widgets/compute_widget.py | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/plugin/qgistim/widgets/compute_widget.py b/plugin/qgistim/widgets/compute_widget.py index b73d838..0c8acd3 100644 --- a/plugin/qgistim/widgets/compute_widget.py +++ b/plugin/qgistim/widgets/compute_widget.py @@ -60,13 +60,11 @@ def finished(self, result): self.parent.clear_outdated_output(self.data["path"]) # Load whatever data is available in the geopackage. - if self.data["head_observations"] or self.data["discharge"]: + if self.data["head_observations"] or self.data["discharge"]: self.parent.load_vector_result(self.data["path"]) if self.data["mesh"]: - self.parent.load_mesh_result( - self.data["path"], self.data["contours"] - ) + self.parent.load_mesh_result(self.data["path"], self.data["contours"]) if self.data["raster"]: self.parent.load_raster_result(self.data["path"]) @@ -125,12 +123,13 @@ def __init__(self, parent=None): self.contour_min_box = QDoubleSpinBox() self.contour_max_box = QDoubleSpinBox() self.contour_step_box = QDoubleSpinBox() + self.contour_max_box.setMaximum(1000.0) + self.contour_max_box.setValue(5.0) + # Ensure the maximum cannot dip below the min box value. 
+ self.contour_min_box.valueChanged.connect(self.set_minimum_contour_stop) self.contour_min_box.setMinimum(-1000.0) self.contour_min_box.setMaximum(1000.0) self.contour_min_box.setValue(-5.0) - self.contour_max_box.setMinimum(-1000.0) - self.contour_max_box.setMaximum(1000.0) - self.contour_max_box.setValue(5.0) self.contour_step_box.setSingleStep(0.1) self.contour_step_box.setValue(0.5) @@ -204,6 +203,9 @@ def reset(self): self.contour_step_box.setValue(0.5) return + def set_minimum_contour_stop(self) -> None: + self.contour_max_box.setMinimum(self.contour_min_box.value() + 0.05) + def set_interpreter_interaction(self, value: bool): self.parent.set_interpreter_interaction(value) From e812f6849e16d13b71ed2ecb1ce658e2972d53b9 Mon Sep 17 00:00:00 2001 From: Huite Bootsma Date: Wed, 25 Oct 2023 11:43:00 +0200 Subject: [PATCH 25/41] Vendor geomet ourselves --- gistim/geomet/__init__.py | 17 + gistim/geomet/geopackage.py | 410 ++++++++++++++++ gistim/geomet/util.py | 127 +++++ gistim/geomet/wkb.py | 936 ++++++++++++++++++++++++++++++++++++ gistim/geopackage.py | 5 +- 5 files changed, 1493 insertions(+), 2 deletions(-) create mode 100644 gistim/geomet/__init__.py create mode 100644 gistim/geomet/geopackage.py create mode 100644 gistim/geomet/util.py create mode 100644 gistim/geomet/wkb.py diff --git a/gistim/geomet/__init__.py b/gistim/geomet/__init__.py new file mode 100644 index 0000000..9a13841 --- /dev/null +++ b/gistim/geomet/__init__.py @@ -0,0 +1,17 @@ +# These modules are copied from: https://github.com/geomet/geomet +# Which has the following copyright notice: +# +# +# Copyright 2013 Lars Butler & individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/gistim/geomet/geopackage.py b/gistim/geomet/geopackage.py new file mode 100644 index 0000000..e46caab --- /dev/null +++ b/gistim/geomet/geopackage.py @@ -0,0 +1,410 @@ +# Copyright 2020 Tom Caruso & individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import struct as _struct + +from gistim.geomet import wkb as _wkb +from gistim.geomet.util import as_bin_str as _as_bin_str +from gistim.geomet.util import endian_token as _endian_token +from gistim.geomet.util import take as _take + + +def dump(obj, dest_file, big_endian=True): + """ + Dump GeoJSON-like `dict` to GeoPackage binary + and write it to the `dest_file`. + + :param dict obj: + A GeoJSON-like dictionary. It must at least the keys 'type' and + 'coordinates'. + :param dest_file: + Open and writable file-like object. + :param bool big_endian: + specify endianess of the dumped object. 
+ + :return: + """ + dest_file.write(dumps(obj, big_endian)) + + +def load(source_file): + """ + Load a GeoJSON `dict` object from a ``source_file`` containing + GeoPackage (as a byte string). + + :param source_file: + Open and readable file-like object. + + :return: + A GeoJSON `dict` representing the geometry read from the file. + """ + return loads(source_file.read()) + + +def dumps(obj, big_endian=True): + """ + Dump a GeoJSON-like dict to a GeoPackage bytestring. + + + If the dict contains a top-level 'meta' key like so: + + ``` + 'meta': {'srid': 4326} + ``` + then the srid will be added to the geopackage header, but *not* + to the WKB geometry header. + + + If the dict contains a top-level 'bbox' key like so: + + ``` + 'bbox': [0, 0, 3, 3] + ``` + + Then an envelope will be added to the geopackage header + with this information. + + + If the geometry's coordinates are empty (an empty list) + then the geopackage header's "empty" flag will be set, + denoting that this geometry has no coordinates. + + Please note that while this library can parse geopackages + with a mixed byte-order in the header, it will only produce + blobs with consistent byte order (albeit properly marked as such). + That means you cannot product a geopackage with e.g. little-endian + header and big-endian WKB geometry. + + :param dict obj: + The geojson geometry to dump + :param bool big_endian: + if True, the geopackage binary will use big-endian + byte order, little-endian otherwise. + + :return bytes: + bytestring representing the geometry in geopackage + format. + """ + header = _build_geopackage_header(obj, not big_endian) + result = _wkb._dumps(obj, big_endian, include_meta=False) + return header + result + + +def loads(string): + """ + Construct a GeoJSON `dict` from geopackage (string). + + This function strips the geopackage header from the + string and passes the remaining WKB geometry to the + `geomet.wkb.loads` function. 
+ + The envelope, if present, is added to the GeoJSON as + a key called 'bbox' as per the GeoJSON spec, [1]. + + If an SRID is specified in the geopackage header + AND the wkb header, the SRID in the geopackage header + will take precedence and will replace that SRID + in the returned dict. + + [1] https://tools.ietf.org/html/rfc7946#section-5 + + :param bytes string: + geopackage byte string. + :return dict: + GeoJSON represented the parsed geopackage binary. + """ + string = iter(string) + + header = _as_bin_str(_take(_GeoPackage.HEADER_LEN, string)) + + _check_is_valid(header) + g, p, version, empty, envelope_indicator, is_little_endian, srid = _parse_header( + header + ) + + wkb_offset = _get_wkb_offset(envelope_indicator) + left_to_take = wkb_offset - _GeoPackage.HEADER_LEN + envelope_data = _as_bin_str(_take(left_to_take, string)) + + if envelope_data: + envelope = _parse_envelope(envelope_indicator, envelope_data, is_little_endian) + + result = _wkb.loads(string) + + if srid: + result["meta"] = {"srid": int(srid)} + result["crs"] = { + "type": "name", + "properties": {"name": "EPSG%s" % srid}, + } + + if envelope_data: + result["bbox"] = envelope + + return result + + +class _GeoPackage: + """ + Much more information on geopackage structure + can be found here: http://www.geopackage.org/spec/#gpb_format + """ + + # The ascii letter 'G' + MAGIC1 = 0x47 + # The ascii letter 'P' + MAGIC2 = 0x50 + VERSION1 = 0x00 + HEADER_LEN = 8 + HEADER_PACK_FMT = "BBBBI" + ENVELOPE_2D_LEN = 32 + ENVELOPE_3D_LEN = 48 + ENVELOPE_4D_LEN = 64 + ENVELOPE_MASK = 0b00001111 + EMPTY_GEOM_MASK = 0b00011111 + ENDIANNESS_MASK = 0b00000001 + + +# map the "envelope indicator" integer we get out of the geopackage header +# to the dimensionality of the envelope. +# more info here: http://www.geopackage.org/spec/#gpb_format +# in the "flags" section, bits 3, 2, 1. 
+_indicator_to_dim = { + 0: 0, + 1: 4, + 2: 6, + 3: 6, + 4: 8, +} + +# Map the dimensionality of our envelope to the indicator +# integer we will use in the geopackage binary header. +# because we have no way to tell between Z and M values, +# if the geometry has 3 dimensions we default to assume Z. +_dim_to_indicator = {0: 0, 4: 1, 6: 2, 8: 4} + + +def is_valid(data): + """ + Check if the data represents a valid geopackage + geometry. Input can be either the full geometry or + just the header. + + :param bytes data: + bytes representing the geopackage binary. + + :return (bool, str): + Is the geopackage valid, if not, string describing why + """ + g, p, version, _, envelope_indicator, _, _ = _parse_header(data[:8]) + if (g != _GeoPackage.MAGIC1) or (p != _GeoPackage.MAGIC2): + return False, "Missing Geopackage header magic bytes" + if version != _GeoPackage.VERSION1: + return False, "Geopackage version must be 0" + if (envelope_indicator < 0) or (envelope_indicator > 4): + return False, "Envelope indicator must be between 0-4" + return True, "" + + +def _header_is_little_endian(header): + """ + Check to see if the header is encoded + as little endian or big endian. + + Either the entire binary blob or + just the header can be passed in. + + :param bytes header: + geopackage header or binary blob + + :return bool: is the header little endian + """ + (flags,) = _struct.unpack("B", header[3:4]) + return flags & _GeoPackage.ENDIANNESS_MASK + + +def _parse_header(header): + """ + Unpack all information from the geopackage + header, including "magic" GP bytes. Returns + all of them so we can confirm that this + geopackage is validly formed. Can also accept + the full binary blob. + + :param header: + the header or the full geometry. + + :return 7-tuple: + all attributes stored in the binary header. 
+ """ + is_little_endian = _header_is_little_endian(header) + fmt = _endian_token(is_little_endian) + _GeoPackage.HEADER_PACK_FMT + + g, p, version, flags, srid = _struct.unpack(fmt, header[: _GeoPackage.HEADER_LEN]) + empty, envelope_indicator, endianness = _parse_flags(flags) + return g, p, version, empty, envelope_indicator, endianness, srid + + +def _parse_flags(flags): + """ + Parse the bits in the "flags" byte + of the geopackage header to retrieve + useful information. We specifically parse + the endianness, the envelope indicator, + and the "empty" flag. + + Much more info can be found in + the documentation [1]. + + [1] http://www.geopackage.org/spec/#gpb_format + :param byte flags: + The "flags" byte of a geopackage header. + + :return tuple: + """ + endianness = flags & _GeoPackage.ENDIANNESS_MASK + envelope_indicator = (flags & _GeoPackage.ENVELOPE_MASK) >> 1 + empty = (flags & _GeoPackage.EMPTY_GEOM_MASK) >> 4 + return empty, envelope_indicator, endianness + + +def _build_flags(empty, envelope_indicator, is_little_endian=1): + """ + Create the "flags" byte which goes into + the geopackage header. Much more info + can be found in the documentation [1]. + + [1] http://www.geopackage.org/spec/#gpb_format + + :param int empty: + 0 or 1 indicating whether the geometry is empty. + True and False also work as expected. + :param int envelope_indicator: + indicates the dimensionality of the envelope. + :param int is_little_endian: + 0 or 1 (or False / True) indicating + whether the header should be + little-endian encoded. + + :return byte: + geopackage header flags + """ + flags = 0b0 + if empty: + flags = (flags | 1) << 3 + if envelope_indicator: + flags = flags | envelope_indicator + + return (flags << 1) | is_little_endian + + +def _build_geopackage_header(obj, is_little_endian): + """ + Create the geopackage header for the input object. 
+ Looks for a 'bbox' key on the geometry to use + for an envelope, and a 'meta' key with an + SRID to encode into the header. + + :param dict obj: + a geojson object + :param bool is_little_endian: + which endianness to use when + encoding the data. + + :return bytes: geopackage header. + """ + # Collect geometry metadata. + empty = 1 if len(obj["coordinates"]) == 0 else 0 + envelope = obj.get("bbox", []) + srid = obj.get("meta", {}).get("srid", 0) + + try: + envelope_indicator = _dim_to_indicator[len(envelope)] + except KeyError: + raise ValueError( + "Bounding box must be of length 2*n where " + "n is the number of dimensions represented " + "in the contained geometries." + ) + + pack_args = [ + _GeoPackage.MAGIC1, + _GeoPackage.MAGIC2, + _GeoPackage.VERSION1, + # This looks funny, but _build_flags wants a 1 or 0 for + # "little endian" because it uses it to `or` with the bits. + # Conveniently, in Python, False == 0 and True == 1, so + # we can pass the boolean right in and it works as expected. + _build_flags(empty, envelope_indicator, is_little_endian), + srid, + ] + + pack_fmt = _endian_token(is_little_endian) + _GeoPackage.HEADER_PACK_FMT + + # This has no effect if we have a 0 envelope indicator. + pack_fmt += "d" * _indicator_to_dim[envelope_indicator] + pack_args.extend(envelope) + + return _struct.pack(pack_fmt, *pack_args) + + +def _check_is_valid(data): + """ + Raise if the header is not valid geopackage. + + :param bytes data: Geopackage data or header. + + :return None: + """ + valid, reason = is_valid(data) + if not valid: + raise ValueError( + "Could not read Geopackage geometry " "because of errors: " + reason + ) + + +def _get_wkb_offset(envelope_indicator): + """ + Get the full byte offset at which the WKB geometry lies + in the geopackage geometry. + + :param int envelope_indicator: + indicates the dimensionality of the envelope. + + :return int: + number of bytes until the beginning of the + WKB geometry. 
+ + """ + base_len = _GeoPackage.HEADER_LEN + return (base_len * _indicator_to_dim[envelope_indicator]) + base_len + + +def _parse_envelope(envelope_indicator, envelope, is_little_endian): + """ + Parse a geopackage envelope bytestring into an n-tuple + of floats. + + :param int envelope_indicator: + indicates the dimensionality of the envelope. + :param bytes envelope: + Bytestring of the envelope values. + :param bool is_little_endian: + how to pack the bytes in the envelope. + + :return tuple[float]: Geometry envelope. + """ + pack_fmt = _endian_token(is_little_endian) + pack_fmt += "d" * _indicator_to_dim[envelope_indicator] + return _struct.unpack(pack_fmt, envelope) diff --git a/gistim/geomet/util.py b/gistim/geomet/util.py new file mode 100644 index 0000000..804b112 --- /dev/null +++ b/gistim/geomet/util.py @@ -0,0 +1,127 @@ +# Copyright 2013 Lars Butler & individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import collections.abc as collections +import itertools + + +def block_splitter(data, block_size): + """ + Creates a generator by slicing ``data`` into chunks of ``block_size``. + + >>> data = range(10) + >>> list(block_splitter(data, 2)) + [[0, 1], [2, 3], [4, 5], [6, 7], [8, 9]] + + If ``data`` cannot be evenly divided by ``block_size``, the last block will + simply be the remainder of the data. 
Example: + + >>> data = range(10) + >>> list(block_splitter(data, 3)) + [[0, 1, 2], [3, 4, 5], [6, 7, 8], [9]] + + If the ``block_size`` is greater than the total length of ``data``, a + single block will be generated: + + >>> data = range(3) + >>> list(block_splitter(data, 4)) + [[0, 1, 2]] + + :param data: + Any iterable. If ``data`` is a generator, it will be exhausted, + obviously. + :param int block_site: + Desired (maximum) block size. + """ + buf = [] + for i, datum in enumerate(data): + buf.append(datum) + if len(buf) == block_size: + yield buf + buf = [] + + # If there's anything leftover (a partial block), + # yield it as well. + if buf: + yield buf + + +def take(n, iterable): + """ + Return first n items of the iterable as a list + + Copied shamelessly from + http://docs.python.org/2/library/itertools.html#recipes. + """ + return list(itertools.islice(iterable, n)) + + +def as_bin_str(a_list): + return bytes(a_list) + + +def round_geom(geom, precision=None): + """Round coordinates of a geometric object to given precision.""" + if geom["type"] == "Point": + x, y = geom["coordinates"] + xp, yp = [x], [y] + if precision is not None: + xp = [round(v, precision) for v in xp] + yp = [round(v, precision) for v in yp] + new_coords = tuple(zip(xp, yp))[0] + if geom["type"] in ["LineString", "MultiPoint"]: + xp, yp = zip(*geom["coordinates"]) + if precision is not None: + xp = [round(v, precision) for v in xp] + yp = [round(v, precision) for v in yp] + new_coords = tuple(zip(xp, yp)) + elif geom["type"] in ["Polygon", "MultiLineString"]: + new_coords = [] + for piece in geom["coordinates"]: + xp, yp = zip(*piece) + if precision is not None: + xp = [round(v, precision) for v in xp] + yp = [round(v, precision) for v in yp] + new_coords.append(tuple(zip(xp, yp))) + elif geom["type"] == "MultiPolygon": + parts = geom["coordinates"] + new_coords = [] + for part in parts: + inner_coords = [] + for ring in part: + xp, yp = zip(*ring) + if precision is not None: + xp = 
[round(v, precision) for v in xp] + yp = [round(v, precision) for v in yp] + inner_coords.append(tuple(zip(xp, yp))) + new_coords.append(inner_coords) + return {"type": geom["type"], "coordinates": new_coords} + + +def flatten_multi_dim(sequence): + """Flatten a multi-dimensional array-like to a single dimensional sequence + (as a generator). + """ + for x in sequence: + if isinstance(x, collections.Iterable) and not isinstance(x, str): + for y in flatten_multi_dim(x): + yield y + else: + yield x + + +def endian_token(is_little_endian): + if is_little_endian: + return "<" + else: + return ">" diff --git a/gistim/geomet/wkb.py b/gistim/geomet/wkb.py new file mode 100644 index 0000000..bb848f9 --- /dev/null +++ b/gistim/geomet/wkb.py @@ -0,0 +1,936 @@ +# Copyright 2013 Lars Butler & individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import binascii +import struct +from itertools import chain + +from gistim.geomet.util import as_bin_str, block_splitter, flatten_multi_dim, take + +#: '\x00': The first byte of any WKB string. Indicates big endian byte +#: ordering for the data. +BIG_ENDIAN = b"\x00" +#: '\x01': The first byte of any WKB string. Indicates little endian byte +#: ordering for the data. +LITTLE_ENDIAN = b"\x01" +#: High byte in a 4-byte geometry type field to indicate that a 4-byte SRID +#: field follows. +SRID_FLAG = b"\x20" + +#: Mapping of GeoJSON geometry types to the "2D" 4-byte binary string +#: representation for WKB. 
"2D" indicates that the geometry is 2-dimensional, +#: X and Y components. +#: NOTE: Byte ordering is big endian. +WKB_2D = { + "Point": b"\x00\x00\x00\x01", + "LineString": b"\x00\x00\x00\x02", + "Polygon": b"\x00\x00\x00\x03", + "MultiPoint": b"\x00\x00\x00\x04", + "MultiLineString": b"\x00\x00\x00\x05", + "MultiPolygon": b"\x00\x00\x00\x06", + "GeometryCollection": b"\x00\x00\x00\x07", +} + +#: Mapping of GeoJSON geometry types to the "Z" 4-byte binary string +#: representation for WKB. "Z" indicates that the geometry is 3-dimensional, +#: with X, Y, and Z components. +#: NOTE: Byte ordering is big endian. +WKB_Z = { + "Point": b"\x00\x00\x03\xe9", + "LineString": b"\x00\x00\x03\xea", + "Polygon": b"\x00\x00\x03\xeb", + "MultiPoint": b"\x00\x00\x03\xec", + "MultiLineString": b"\x00\x00\x03\xed", + "MultiPolygon": b"\x00\x00\x03\xee", + "GeometryCollection": b"\x00\x00\x03\xef", +} + +#: Mapping of GeoJSON geometry types to the "M" 4-byte binary string +#: representation for WKB. "M" indicates that the geometry is 2-dimensional, +#: with X, Y, and M ("Measure") components. +#: NOTE: Byte ordering is big endian. +WKB_M = { + "Point": b"\x00\x00\x07\xd1", + "LineString": b"\x00\x00\x07\xd2", + "Polygon": b"\x00\x00\x07\xd3", + "MultiPoint": b"\x00\x00\x07\xd4", + "MultiLineString": b"\x00\x00\x07\xd5", + "MultiPolygon": b"\x00\x00\x07\xd6", + "GeometryCollection": b"\x00\x00\x07\xd7", +} + +#: Mapping of GeoJSON geometry types to the "ZM" 4-byte binary string +#: representation for WKB. "ZM" indicates that the geometry is 4-dimensional, +#: with X, Y, Z, and M ("Measure") components. +#: NOTE: Byte ordering is big endian. 
+WKB_ZM = { + "Point": b"\x00\x00\x0b\xb9", + "LineString": b"\x00\x00\x0b\xba", + "Polygon": b"\x00\x00\x0b\xbb", + "MultiPoint": b"\x00\x00\x0b\xbc", + "MultiLineString": b"\x00\x00\x0b\xbd", + "MultiPolygon": b"\x00\x00\x0b\xbe", + "GeometryCollection": b"\x00\x00\x0b\xbf", +} + +#: Mapping of dimension types to maps of GeoJSON geometry type -> 4-byte binary +#: string representation for WKB. +_WKB = { + "2D": WKB_2D, + "Z": WKB_Z, + "M": WKB_M, + "ZM": WKB_ZM, +} + +#: Mapping from binary geometry type (as a 4-byte binary string) to GeoJSON +#: geometry type. +#: NOTE: Byte ordering is big endian. +_BINARY_TO_GEOM_TYPE = dict( + chain(*((reversed(x) for x in wkb_map.items()) for wkb_map in _WKB.values())) +) + +_INT_TO_DIM_LABEL = {2: "2D", 3: "Z", 4: "ZM"} + + +def _get_geom_type(type_bytes): + """Get the GeoJSON geometry type label from a WKB type byte string. + + :param type_bytes: + 4 byte string in big endian byte order containing a WKB type number. + It may also contain a "has SRID" flag in the high byte (the first type, + since this is big endian byte order), indicated as 0x20. If the SRID + flag is not set, the high byte will always be null (0x00). + :returns: + 3-tuple ofGeoJSON geometry type label, the bytes representing the + geometry type, and a separate "has SRID" flag. If the input + `type_bytes` contains an SRID flag, it will be removed. + + >>> # Z Point, with SRID flag + >>> _get_geom_type(b'\\x20\\x00\\x03\\xe9') == ( + ... 'Point', b'\\x00\\x00\\x03\\xe9', True) + True + + >>> # 2D MultiLineString, without SRID flag + >>> _get_geom_type(b'\\x00\\x00\\x00\\x05') == ( + ... 
'MultiLineString', b'\\x00\\x00\\x00\\x05', False) + True + + """ + # slice off the high byte, which may contain the SRID flag + high_byte = bytes([type_bytes[0]]) + has_srid = high_byte == b"\x20" + if has_srid: + # replace the high byte with a null byte + type_bytes = as_bin_str(b"\x00" + type_bytes[1:]) + else: + type_bytes = as_bin_str(type_bytes) + + # look up the geometry type + geom_type = _BINARY_TO_GEOM_TYPE.get(type_bytes) + return geom_type, type_bytes, has_srid + + +def dump(obj, dest_file): + """ + Dump GeoJSON-like `dict` to WKB and write it to the `dest_file`. + + :param dict obj: + A GeoJSON-like dictionary. It must at least the keys 'type' and + 'coordinates'. + :param dest_file: + Open and writable file-like object. + """ + dest_file.write(dumps(obj)) + + +def load(source_file): + """ + Load a GeoJSON `dict` object from a ``source_file`` containing WKB (as a + byte string). + + :param source_file: + Open and readable file-like object. + + :returns: + A GeoJSON `dict` representing the geometry read from the file. + """ + return loads(source_file.read()) + + +def dumps(obj, big_endian=True): + """ + Dump a GeoJSON-like `dict` to a WKB string. + + .. note:: + The dimensions of the generated WKB will be inferred from the first + vertex in the GeoJSON `coordinates`. It will be assumed that all + vertices are uniform. There are 4 types: + + - 2D (X, Y): 2-dimensional geometry + - Z (X, Y, Z): 3-dimensional geometry + - M (X, Y, M): 2-dimensional geometry with a "Measure" + - ZM (X, Y, Z, M): 3-dimensional geometry with a "Measure" + + If the first vertex contains 2 values, we assume a 2D geometry. + If the first vertex contains 3 values, this is slightly ambiguous and + so the most common case is chosen: Z. + If the first vertex contains 4 values, we assume a ZM geometry. + + The WKT/WKB standards provide a way of differentiating normal (2D), Z, + M, and ZM geometries (http://en.wikipedia.org/wiki/Well-known_text), + but the GeoJSON spec does not. 
Therefore, for the sake of interface + simplicity, we assume that geometry that looks 3D contains XYZ + components, instead of XYM. + + If the coordinates list has no coordinate values (this includes nested + lists, for example, `[[[[],[]], []]]`, the geometry is considered to be + empty. Geometries, with the exception of points, have a reasonable + "empty" representation in WKB; however, without knowing the number of + coordinate values per vertex, the type is ambigious, and thus we don't + know if the geometry type is 2D, Z, M, or ZM. Therefore in this case + we expect a `ValueError` to be raised. + + :param dict obj: + GeoJson-like `dict` object. + :param bool big_endian: + Defaults to `True`. If `True`, data values in the generated WKB will + be represented using big endian byte order. Else, little endian. + + :returns: + A WKB binary string representing of the ``obj``. + """ + return _dumps(obj, big_endian) + + +def _dumps(obj, big_endian=True, include_meta=True): + """ + Basically perform the action of dumps, but with some extra flags for + behavior specifically needed by the geopackage...package. + """ + geom_type = obj["type"] + if include_meta: + meta = obj.get("meta", {}) + else: + meta = {} + + exporter = _dumps_registry.get(geom_type) + if exporter is None: + _unsupported_geom_type(geom_type) + + # Check for empty geometries. GeometryCollections have a slightly different + # JSON/dict structure, but that's handled. + coords_or_geoms = obj.get("coordinates", obj.get("geometries")) + if len(list(flatten_multi_dim(coords_or_geoms))) == 0: + raise ValueError( + "Empty geometries cannot be represented in WKB. Reason: The " + "dimensionality of the WKB would be ambiguous." + ) + + return exporter(obj, big_endian, meta) + + +def loads(string): + """ + Construct a GeoJSON `dict` from WKB (`string`). + + The resulting GeoJSON `dict` will include the SRID as an integer in the + `meta` object. 
This was an arbitrary decision made by `geomet, the + discussion of which took place here: + https://github.com/geomet/geomet/issues/28. + + In order to be consistent with other libraries [1] and (deprecated) + specifications [2], also include the same information in a `crs` + object. This isn't ideal, but the `crs` member is no longer part of + the GeoJSON standard, according to RFC7946 [3]. However, it's still + useful to include this information in GeoJSON payloads because it + supports conversion to EWKT/EWKB (which are canonical formats used by + PostGIS and the like). + + Example: + + {'type': 'Point', + 'coordinates': [0.0, 1.0], + 'meta': {'srid': 4326}, + 'crs': {'type': 'name', 'properties': {'name': 'EPSG4326'}}} + + NOTE(larsbutler): I'm not sure if it's valid to just prefix EPSG + (European Petroluem Survey Group) to an SRID like this, but we'll + stick with it for now until it becomes a problem. + + NOTE(larsbutler): Ideally, we should use URNs instead of this + notation, according to the new GeoJSON spec [4]. However, in + order to be consistent with [1], we'll stick with this approach + for now. + + References: + + [1] - https://github.com/bryanjos/geo/issues/76 + [2] - http://geojson.org/geojson-spec.html#coordinate-reference-system-objects + [3] - https://tools.ietf.org/html/rfc7946#appendix-B.1 + [4] - https://tools.ietf.org/html/rfc7946#section-4 + """ # noqa + string = iter(string) + # endianness = string[0:1] + endianness = as_bin_str(take(1, string)) + if endianness == BIG_ENDIAN: + big_endian = True + elif endianness == LITTLE_ENDIAN: + big_endian = False + else: + raise ValueError( + "Invalid endian byte: '0x%s'. 
Expected 0x00 or 0x01" + % binascii.hexlify(endianness.encode()).decode() + ) + + endian_token = ">" if big_endian else "<" + # type_bytes = string[1:5] + type_bytes = as_bin_str(take(4, string)) + if not big_endian: + # To identify the type, order the type bytes in big endian: + type_bytes = type_bytes[::-1] + + geom_type, type_bytes, has_srid = _get_geom_type(type_bytes) + srid = None + if has_srid: + srid_field = as_bin_str(take(4, string)) + [srid] = struct.unpack("%si" % endian_token, srid_field) + + # data_bytes = string[5:] # FIXME: This won't work for GeometryCollections + data_bytes = string + + importer = _loads_registry.get(geom_type) + + if importer is None: + _unsupported_geom_type(geom_type) + + data_bytes = iter(data_bytes) + result = importer(big_endian, type_bytes, data_bytes) + if has_srid: + # As mentioned in the docstring above, include both approaches to + # indicating the SRID. + result["meta"] = {"srid": int(srid)} + result["crs"] = { + "type": "name", + "properties": {"name": "EPSG%s" % srid}, + } + return result + + +def _unsupported_geom_type(geom_type): + raise ValueError("Unsupported geometry type '%s'" % geom_type) + + +# TODO: dont default meta to none +def _header_bytefmt_byteorder(geom_type, num_dims, big_endian, meta=None): + """ + Utility function to get the WKB header (endian byte + type header), byte + format string, and byte order string. 
+ """ + dim = _INT_TO_DIM_LABEL.get(num_dims) + if dim is None: + pass # TODO: raise + + type_byte_str = _WKB[dim][geom_type] + srid = meta.get("srid") + if srid is not None: + # Add the srid flag + type_byte_str = SRID_FLAG + type_byte_str[1:] + + if big_endian: + header = BIG_ENDIAN + byte_fmt = b">" + byte_order = ">" + else: + header = LITTLE_ENDIAN + byte_fmt = b"<" + byte_order = "<" + # reverse the byte ordering for little endian + type_byte_str = type_byte_str[::-1] + + header += type_byte_str + if srid is not None: + srid = int(srid) + + if big_endian: + srid_header = struct.pack(">i", srid) + else: + srid_header = struct.pack(" Tuple[pd.DataFrame, BoundingBox, s # Get first (and only) member of set. geometry_type = next(iter(geometry_type)) bounding_box = collect_bounding_box(geometry, geometry_type) - dataframe["geom"] = [geomet.geopackage.dumps(f) for f in geometry] + dataframe["geom"] = [geopackage.dumps(f) for f in geometry] return dataframe, bounding_box, geometry_type From a94393c7103705fe04290d6cfa8af1aeecf8e922 Mon Sep 17 00:00:00 2001 From: Huite Bootsma Date: Wed, 25 Oct 2023 11:43:21 +0200 Subject: [PATCH 26/41] Always overwrite geopackage content --- gistim/compute.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/gistim/compute.py b/gistim/compute.py index 9712b42..ddc8be8 100644 --- a/gistim/compute.py +++ b/gistim/compute.py @@ -222,8 +222,7 @@ def compute_steady( for layername, content in observations.items(): tables[layername] = timml_head_observations(timml_model, content["data"]) - if tables: - write_geopackage(tables, crs, path) + write_geopackage(tables, crs, path) return From c6e8d0be8d4b76205c5099010dda41c98cb96d4b Mon Sep 17 00:00:00 2001 From: Huite Bootsma Date: Wed, 25 Oct 2023 11:43:46 +0200 Subject: [PATCH 27/41] Add "Remote Head Well" element --- plugin/qgistim/core/elements/__init__.py | 3 +- plugin/qgistim/core/elements/headwell.py | 40 +++++++++++++++++++++++- plugin/qgistim/core/formatting.py | 7 
+++-- plugin/qgistim/core/processing.py | 1 - plugin/qgistim/widgets/compute_widget.py | 3 -- 5 files changed, 46 insertions(+), 8 deletions(-) diff --git a/plugin/qgistim/core/elements/__init__.py b/plugin/qgistim/core/elements/__init__.py index f7d1dba..730650a 100644 --- a/plugin/qgistim/core/elements/__init__.py +++ b/plugin/qgistim/core/elements/__init__.py @@ -11,7 +11,7 @@ from qgistim.core.elements.domain import Domain from qgistim.core.elements.element import Element from qgistim.core.elements.head_line_sink import HeadLineSink -from qgistim.core.elements.headwell import HeadWell +from qgistim.core.elements.headwell import HeadWell, RemoteHeadWell from qgistim.core.elements.impermeable_line_doublet import ImpermeableLineDoublet from qgistim.core.elements.leaky_building_pit import LeakyBuildingPit from qgistim.core.elements.leaky_line_doublet import LeakyLineDoublet @@ -32,6 +32,7 @@ UniformFlow, Well, HeadWell, + RemoteHeadWell, HeadLineSink, LineSinkDitch, CircularAreaSink, diff --git a/plugin/qgistim/core/elements/headwell.py b/plugin/qgistim/core/elements/headwell.py index ea5746d..9713744 100644 --- a/plugin/qgistim/core/elements/headwell.py +++ b/plugin/qgistim/core/elements/headwell.py @@ -1,7 +1,14 @@ from typing import Any, Dict from PyQt5.QtCore import QVariant -from qgis.core import QgsDefaultValue, QgsField, QgsSingleSymbolRenderer +from PyQt5.QtGui import QColor +from qgis.core import ( + QgsArrowSymbolLayer, + QgsDefaultValue, + QgsField, + QgsLineSymbol, + QgsSingleSymbolRenderer, +) from qgistim.core.elements.colors import BLUE from qgistim.core.elements.element import ElementSchema, TransientElement from qgistim.core.schemata import ( @@ -78,3 +85,34 @@ def process_timml_row(self, row, other=None) -> Dict[str, Any]: "layers": row["layer"], "label": row["label"], } + + +class RemoteHeadWell(HeadWell): + element_type = "Remote Head Well" + geometry_type = "Linestring" + + @classmethod + def renderer(cls) -> QgsSingleSymbolRenderer: + arrow = 
QgsArrowSymbolLayer() + red, green, blue, _ = [int(v) for v in BLUE.split(",")] + arrow.setColor(QColor(red, green, blue)) + arrow.setHeadLength(2.5) + symbol = QgsLineSymbol.createSimple({}) + symbol.changeSymbolLayer(0, arrow) + return QgsSingleSymbolRenderer(symbol) + + def process_timml_row(self, row, other=None) -> Dict[str, Any]: + xy = self.linestring_xy(row) + xw, yw = xy[-1] + xc, yc = xy[0] + return { + "xw": xw, + "yw": yw, + "hw": row["head"], + "rw": row["radius"], + "res": row["resistance"], + "layers": row["layer"], + "label": row["label"], + "xc": xc, + "yc": yc, + } diff --git a/plugin/qgistim/core/formatting.py b/plugin/qgistim/core/formatting.py index 5b6416b..0bb83aa 100644 --- a/plugin/qgistim/core/formatting.py +++ b/plugin/qgistim/core/formatting.py @@ -15,6 +15,7 @@ "Circular Area Sink": "CircAreaSink", "Well": "Well", "Head Well": "HeadWell", + "Remote Head Well": "HeadWell", "Polygon Inhomogeneity": "PolygonInhomMaq", "Polygon Area Sink": "PolygonInhomMaq", "Polygon Semi-Confined Top": "PolygonInhomMaq", @@ -155,7 +156,9 @@ def to_script_string(data: Dict[str, Any]) -> str: return "\n".join(strings) -def to_json(data: Dict[str, Any], cellsize: float, output_options: Dict[str, bool]) -> Dict[str, Any]: +def to_json( + data: Dict[str, Any], cellsize: float, output_options: Dict[str, bool] +) -> Dict[str, Any]: """ Take the data and add: @@ -186,5 +189,5 @@ def to_json(data: Dict[str, Any], cellsize: float, output_options: Dict[str, boo plugin_name = re.split("timml |ttim ", prefix)[1] timml_name = MAPPING[plugin_name] json_data[layername] = {"type": timml_name, "name": name, "data": element_data} - + return json_data diff --git a/plugin/qgistim/core/processing.py b/plugin/qgistim/core/processing.py index 502b4f8..f56fd07 100644 --- a/plugin/qgistim/core/processing.py +++ b/plugin/qgistim/core/processing.py @@ -23,7 +23,6 @@ QgsVectorLayer, QgsVectorLayerTemporalProperties, ) - from qgistim.core import geopackage diff --git 
a/plugin/qgistim/widgets/compute_widget.py b/plugin/qgistim/widgets/compute_widget.py index 0c8acd3..edcb08f 100644 --- a/plugin/qgistim/widgets/compute_widget.py +++ b/plugin/qgistim/widgets/compute_widget.py @@ -57,12 +57,9 @@ def finished(self, result): self.parent.set_interpreter_interaction(True) if result: self.push_success_message() - self.parent.clear_outdated_output(self.data["path"]) - # Load whatever data is available in the geopackage. if self.data["head_observations"] or self.data["discharge"]: self.parent.load_vector_result(self.data["path"]) - if self.data["mesh"]: self.parent.load_mesh_result(self.data["path"], self.data["contours"]) if self.data["raster"]: From 923506152a79915b6c7ebe40cecd9083c3ae4e72 Mon Sep 17 00:00:00 2001 From: Huite Bootsma Date: Wed, 25 Oct 2023 11:45:34 +0200 Subject: [PATCH 28/41] Remove print calls --- gistim/ttim_elements.py | 1 - plugin/qgistim/core/schemata.py | 2 -- 2 files changed, 3 deletions(-) diff --git a/gistim/ttim_elements.py b/gistim/ttim_elements.py index 6803fb9..eaeab20 100644 --- a/gistim/ttim_elements.py +++ b/gistim/ttim_elements.py @@ -388,7 +388,6 @@ def initialize_model(spec: TtimModelSpecification, timml_model) -> TimModel: if (not element_spec.active) or (elementtype not in MAPPING): continue - # print(f"adding {name} as {elementtype}") f_to_kwargs, element = MAPPING[elementtype] for i, kwargs in enumerate(f_to_kwargs(element_spec, model.tstart)): if elementtype == "Observation": diff --git a/plugin/qgistim/core/schemata.py b/plugin/qgistim/core/schemata.py index a13bd78..162a7d4 100644 --- a/plugin/qgistim/core/schemata.py +++ b/plugin/qgistim/core/schemata.py @@ -197,8 +197,6 @@ def __init__(self, x: str, y: str): self.y = y def validate(self, data, _=None) -> MaybeError: - print(data[self.x]) - print(data[self.y]) if (data[self.x] is not None) and (data[self.y] is not None): return f"Either {self.x} or {self.y} should be provided, not both." 
return None From 85c689701271f3f4aa3afb337ca78f9d2cb73365 Mon Sep 17 00:00:00 2001 From: Huite Bootsma Date: Thu, 26 Oct 2023 13:41:39 +0200 Subject: [PATCH 29/41] docstrings --- plugin/qgistim/widgets/error_window.py | 16 +++++++++++++++- plugin/qgistim/widgets/tim_widget.py | 10 ++++++++++ plugin/qgistim/widgets/version_dialog.py | 4 ++++ 3 files changed, 29 insertions(+), 1 deletion(-) diff --git a/plugin/qgistim/widgets/error_window.py b/plugin/qgistim/widgets/error_window.py index 2f922ca..fc7be20 100644 --- a/plugin/qgistim/widgets/error_window.py +++ b/plugin/qgistim/widgets/error_window.py @@ -4,7 +4,11 @@ def format_list(errors: Dict[str, Any]): - """Recursive formatting""" + """ + Format a list of errors to HTML lists. + + Since the level of nesting may vary, this function is called recursively. + """ messages = [] for variable, var_errors in errors.items(): if isinstance(var_errors, list): @@ -19,6 +23,7 @@ def format_list(errors: Dict[str, Any]): def format_errors(errors: Dict[str, Dict[str, Any]]): + """Format the errors per element to HTML lists.""" messages = [] for element, element_errors in errors.items(): messages.append(f"{element}") @@ -27,6 +32,15 @@ def format_errors(errors: Dict[str, Dict[str, Any]]): class ValidationDialog(QDialog): + """ + Presents the result of the validation by the schemata in an HTML window. + + A QTextBrowser is used here since it has some useful properties: + + * It features an automatic scrollbar. + * It uses HTML, so we may show nested lists. 
+ """ + def __init__(self, errors: Dict[str, Dict[str, Any]]): super().__init__() self.cancel_button = QPushButton("Close") diff --git a/plugin/qgistim/widgets/tim_widget.py b/plugin/qgistim/widgets/tim_widget.py index 3622196..b9c825c 100644 --- a/plugin/qgistim/widgets/tim_widget.py +++ b/plugin/qgistim/widgets/tim_widget.py @@ -244,6 +244,11 @@ def execute(self, data: Dict[str, str]) -> Dict[str, Any]: return response def set_interpreter_interaction(self, value: bool) -> None: + """ + Disable interaction with the external interpreter. Some task may take a + minute or so to run. No additional tasks should be scheduled in the + mean time. + """ self.compute_widget.compute_button.setEnabled(value) self.dataset_widget.convert_button.setEnabled(value) self.extraction_widget.extract_button.setEnabled(value) @@ -264,6 +269,11 @@ def crs(self) -> Any: Returns coordinate reference system of current mapview Returns None if the crs does not have meters as its units. + + This is important since TimML and TTim always assume a Cartesian + reference plane, and variables such as conductivity are expressed in + units such as meter per day. As distances are inferred from the + geometry, the geometry must have appropriate units. """ crs = self.iface.mapCanvas().mapSettings().destinationCrs() if crs.mapUnits() not in ( diff --git a/plugin/qgistim/widgets/version_dialog.py b/plugin/qgistim/widgets/version_dialog.py index c62699e..ac6c763 100644 --- a/plugin/qgistim/widgets/version_dialog.py +++ b/plugin/qgistim/widgets/version_dialog.py @@ -12,6 +12,10 @@ class VersionDialog(QDialog): + """ + Provides some basic info on what versions are used by the back-end. 
+ """ + def __init__(self, parent=None): QDialog.__init__(self, parent) self.setWindowTitle("QGIS-Tim Version Info") From a404229cf9198d6b7e7f8846a4e4001485a023a6 Mon Sep 17 00:00:00 2001 From: Huite Bootsma Date: Thu, 26 Oct 2023 13:42:17 +0200 Subject: [PATCH 30/41] Isolate set_temporal_properties into a function --- plugin/qgistim/core/processing.py | 25 ++++++++++-------- plugin/qgistim/widgets/compute_widget.py | 32 ++++++------------------ 2 files changed, 23 insertions(+), 34 deletions(-) diff --git a/plugin/qgistim/core/processing.py b/plugin/qgistim/core/processing.py index f56fd07..32e540d 100644 --- a/plugin/qgistim/core/processing.py +++ b/plugin/qgistim/core/processing.py @@ -170,7 +170,7 @@ def transient_contours( end_dates = { a: b - datetime.timedelta(minutes=1) for a, b in zip(dates[:-1], dates[1:]) } - end_dates[dates[-1]] = dates[-1] + datetime.timedelta(days=1) + end_dates[dates[-1]] = dates[-1] + datetime.timedelta(minutes=1) # Add items to layer for item in feature_data: @@ -188,18 +188,22 @@ def transient_contours( provider.addFeature(f) contour_layer.updateExtents() - # Set the temporal properties - temporal_properties = contour_layer.temporalProperties() - temporal_properties.setStartField("datetime_start") - temporal_properties.setEndField("datetime_end") - temporal_properties.setMode( - QgsVectorLayerTemporalProperties.ModeFeatureDateTimeStartAndEndFromFields - ) - temporal_properties.setIsActive(True) - return contour_layer +def set_temporal_properties(layer: QgsVectorLayer) -> None: + fields = [field.name() for field in layer.fields()] + if ("datetime_start" in fields) and ("datetime_end" in fields): + temporal_properties = layer.temporalProperties() + temporal_properties.setStartField("datetime_start") + temporal_properties.setEndField("datetime_end") + temporal_properties.setMode( + QgsVectorLayerTemporalProperties.ModeFeatureDateTimeStartAndEndFromFields + ) + temporal_properties.setIsActive(True) + return + + def mesh_contours( 
gpkg_path: str, layer: QgsMeshLayer, @@ -221,4 +225,5 @@ def mesh_contours( layername=name, newfile=newfile, ) + set_temporal_properties(written_layer) return written_layer diff --git a/plugin/qgistim/widgets/compute_widget.py b/plugin/qgistim/widgets/compute_widget.py index edcb08f..d88012f 100644 --- a/plugin/qgistim/widgets/compute_widget.py +++ b/plugin/qgistim/widgets/compute_widget.py @@ -25,13 +25,12 @@ QgsRasterLayer, QgsTask, QgsVectorLayer, - QgsVectorLayerTemporalProperties, ) from qgis.gui import QgsMapLayerComboBox from qgistim.core import geopackage, layer_styling from qgistim.core.dummy_ugrid import write_dummy_ugrid from qgistim.core.elements import ELEMENTS, parse_name -from qgistim.core.processing import mesh_contours +from qgistim.core.processing import mesh_contours, set_temporal_properties from qgistim.core.task import BaseServerTask @@ -224,6 +223,7 @@ def add_contour_layer(self, layer) -> None: self.parent.output_group.add_layer( layer, "vector", renderer=renderer, on_top=True, labels=labels ) + return @property def output_path(self) -> str: @@ -290,6 +290,7 @@ def set_output_path(self) -> None: self.output_line_edit.setText(str(Path(path).with_suffix(""))) # Note: Qt does pretty good validity checking of the Path in the # Dialog, there is no real need to validate path here. 
+ return def set_default_path(self, text: str) -> None: """ @@ -299,6 +300,7 @@ def set_default_path(self, text: str) -> None: return path = Path(text) self.output_line_edit.setText(str(path.parent / path.stem)) + return def compute(self) -> None: """ @@ -351,6 +353,7 @@ def compute(self) -> None: ) self.set_interpreter_interaction(False) QgsApplication.taskManager().addTask(self.compute_task) + return def domain(self) -> None: """ @@ -360,6 +363,7 @@ def domain(self) -> None: ymax, ymin = item.element.update_extent(self.parent.iface) self.set_cellsize_from_domain(ymax, ymin) self.parent.iface.mapCanvas().refreshAllLayers() + return def set_cellsize_from_domain(self, ymax: float, ymin: float) -> None: # Guess a reasonable value for the cellsize: about 50 rows @@ -373,6 +377,7 @@ def set_cellsize_from_domain(self, ymax: float, ymin: float) -> None: elif dy > 1.0: dy = round(dy) self.cellsize_spin_box.setValue(dy) + return def load_mesh_result(self, path: Union[Path, str], load_contours: bool) -> None: path = Path(path) @@ -446,16 +451,6 @@ def steady_or_first(name: str) -> bool: layer.setRenderer(renderer) self.parent.output_group.add_layer(layer, "raster") - # if contour: - # contour_layer = raster_steady_contours( - # layer=layer, - # name=name, - # start=start, - # stop=stop, - # step=step, - # ) - # self.add_contour_layer(contour_layer) - return def load_vector_result(self, path: Union[Path, str]) -> None: @@ -471,19 +466,8 @@ def load_vector_result(self, path: Union[Path, str]) -> None: layer = QgsVectorLayer( f"{gpkg_path}|layername={layername}", layers_panel_name ) - # Set the temporal properties if it's a temporal layer - temporal_properties = layer.temporalProperties() - fields = [field.name() for field in layer.fields()] - if ("datetime_start" in fields) and ("datetime_end" in fields): - temporal_properties.setStartField("datetime_start") - temporal_properties.setEndField("datetime_end") - temporal_properties.setMode( - 
QgsVectorLayerTemporalProperties.ModeFeatureDateTimeStartAndEndFromFields - ) - temporal_properties.setIsActive(True) - else: - temporal_properties.setIsActive(False) + set_temporal_properties(layer) if ( "timml Head Observation:" in layername From 384d79779dc9b2ceb978e31ce98b8eb85317b65f Mon Sep 17 00:00:00 2001 From: Huite Bootsma Date: Thu, 26 Oct 2023 13:42:50 +0200 Subject: [PATCH 31/41] Make sure pandas write dates as SQL DATETIME --- gistim/geopackage.py | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/gistim/geopackage.py b/gistim/geopackage.py index 0835818..430675f 100644 --- a/gistim/geopackage.py +++ b/gistim/geopackage.py @@ -7,6 +7,7 @@ from pathlib import Path from typing import Dict, List, NamedTuple, Tuple +import numpy as np import pandas as pd from gistim.geomet import geopackage @@ -120,6 +121,18 @@ def process_table(dataframe: pd.DataFrame) -> Tuple[pd.DataFrame, BoundingBox, s return dataframe, bounding_box, geometry_type +def force_sql_datetime(df: pd.DataFrame): + """ + Pandas SQL writes datetimes as SQL Timestamps, which are not accepted by QGIS. + They have to SQL DATETIME instead. + """ + return { + colname: "DATETIME" + for colname, dtype in df.dtypes.to_dict().items() + if np.issubdtype(dtype, np.datetime64) + } + + def write_geopackage( tables: Dict[str, pd.DataFrame], crs: CoordinateReferenceSystem, path: Path ) -> None: @@ -162,7 +175,9 @@ def write_geopackage( table_names.append(layername) geometry_types.append(geometry_type) bounding_boxes.append(bounding_box) - dataframe.to_sql(layername, con=connection) + dataframe.to_sql( + layername, con=connection, dtype=force_sql_datetime(dataframe) + ) # Create mandatory geopackage tables. 
gpkg_contents = create_gpkg_contents( From d18deec1434f16b54cf0c0439c71c74a349b5e3f Mon Sep 17 00:00:00 2001 From: Huite Bootsma Date: Thu, 26 Oct 2023 13:43:06 +0200 Subject: [PATCH 32/41] Remove deprecated modules --- gistim/ttim_elements.py | 441 ------------- plugin/qgistim/core/tim_elements.py | 956 ---------------------------- 2 files changed, 1397 deletions(-) delete mode 100644 gistim/ttim_elements.py delete mode 100644 plugin/qgistim/core/tim_elements.py diff --git a/gistim/ttim_elements.py b/gistim/ttim_elements.py deleted file mode 100644 index eaeab20..0000000 --- a/gistim/ttim_elements.py +++ /dev/null @@ -1,441 +0,0 @@ -""" -This model converts the geodataframe as read from the geopackage into keyword -arguments for TTim. - -These keyword arguments are used to initialize a model, or used to generate a -Python script. - -""" -from collections import defaultdict -from typing import Any, Dict, List, Tuple - -import black -import geopandas as gpd -import numpy as np -import pandas as pd -import timml -import tqdm -import xarray as xr - -try: - import ttim - from ttim.model import TimModel -except ImportError: - ttim = None - TimModel = None - -from gistim.common import ( - ElementSpecification, - FloatArray, - TransientElementSpecification, - TtimModelSpecification, - aquifer_data, - dict_to_kwargs_code, - filter_nan, - headgrid_code, - linestring_coordinates, - point_coordinates, - sanitized, -) - - -# Dataframe to ttim element -# -------------------------- -def transient_dataframe(spec): - if spec.dataframe is not None: - return spec.dataframe.set_index("timeseries_id") - else: - return None - - -def transient_input(timeseries_df, row, field, time_start) -> List: - timeseries_id = row["timeseries_id"] - row_start = row.get("time_start") - row_end = row.get("time_end") - transient_value = row.get(f"{field}_transient") - if row_start is not None and row_end is not None and transient_value is not None: - steady_value = row[field] - return [(row_start, 
transient_value - steady_value), (row_end, 0.0)] - elif timeseries_df is None or timeseries_id not in timeseries_df.index: - return [(time_start, 0.0)] - else: - timeseries = timeseries_df.loc[[timeseries_id]] - timeseries[field] -= row[field] - timeseries = timeseries.sort_values(by="time_start") - return list( - timeseries[["time_start", field]].itertuples(index=False, name=None) - ) - - -def ttim_model( - dataframe: gpd.GeoDataFrame, - temporal_settings: pd.DataFrame, - timml_model: timml.Model, -) -> Dict[str, Any]: - """ - Parameters - ---------- - dataframe: geopandas.GeoDataFrame - - Returns - ------- - ttim.TimModel - """ - data = aquifer_data(dataframe, transient=True) - row = temporal_settings.iloc[0].to_dict() - data["tstart"] = row["time_start"] - data["tmin"] = row["time_min"] - data["tmax"] = row["time_max"] - data["M"] = row["stehfest_M"] - data["timmlmodel"] = timml_model - return data - - -def observation(spec: TransientElementSpecification, _): - df = transient_dataframe(spec) - if len(df) == 0: - return {} - - dataframe = spec.steady_spec.dataframe - X, Y = point_coordinates(dataframe) - kwargslist = [] - for i, (row, x, y) in enumerate(zip(dataframe.to_dict("records"), X, Y)): - row = filter_nan(row) - timeseries_id = row["timeseries_id"] - kwargslist.append( - { - "x": x, - "y": y, - "t": df.loc[timeseries_id, "time"].values, - "label": row["label"], - "observation_id": i, - } - ) - return kwargslist - - -def well( - spec: TransientElementSpecification, time_start: float -) -> List[Dict[str, Any]]: - df = transient_dataframe(spec) - dataframe = spec.steady_spec.dataframe - X, Y = point_coordinates(dataframe) - kwargslist = [] - for row, x, y in zip(dataframe.to_dict("records"), X, Y): - row = filter_nan(row) - kwargslist.append( - { - "xw": x, - "yw": y, - "tsandQ": transient_input(df, row, "discharge", time_start), - "rw": row["radius"], - "res": row["resistance"], - "layers": row["layer"], - "label": row["label"], - "rc": 
row["caisson_radius"], - "wbstype": "slug" if row["slug"] else "pumping", - } - ) - return kwargslist - - -def headwell( - spec: TransientElementSpecification, time_start: float -) -> List[Dict[str, Any]]: - df = transient_dataframe(spec) - dataframe = spec.steady_spec.dataframe - X, Y = point_coordinates(dataframe) - kwargslist = [] - for row, x, y in zip(dataframe.to_dict("records"), X, Y): - row = filter_nan(row) - kwargslist.append( - { - "xw": x, - "yw": y, - "tsandh": transient_input(df, row, "head", time_start), - "rw": row["radius"], - "res": row["resistance"], - "layers": row["layer"], - "label": row["label"], - } - ) - return kwargslist - - -def headlinesink( - spec: TransientElementSpecification, time_start: float -) -> List[Dict[str, Any]]: - df = transient_dataframe(spec) - kwargslist = [] - for row in spec.steady_spec.dataframe.to_dict("records"): - row = filter_nan(row) - kwargslist.append( - { - "xy": linestring_coordinates(row), - "tsandh": transient_input(df, row, "head", time_start), - "res": row["resistance"], - "wh": row["width"], - "layers": row["layer"], - "label": row["label"], - } - ) - return kwargslist - - -def linesinkditch( - spec: ElementSpecification, time_start: float -) -> List[Dict[str, Any]]: - df = transient_dataframe(spec) - kwargslist = [] - for row in spec.steady_spec.dataframe.to_dict("records"): - row = filter_nan(row) - kwargslist.append( - { - "xy": linestring_coordinates(row), - "tsandQ": transient_input(df, row, "discharge", time_start), - "res": row["resistance"], - "wh": row["width"], - "layers": row["layer"], - "label": row["label"], - } - ) - return kwargslist - - -def leakylinedoublet( - spec: ElementSpecification, time_start: float -) -> List[Dict[str, Any]]: - kwargslist = [] - for row in spec.dataframe.to_dict("records"): - row = filter_nan(row) - kwargslist.append( - { - "xy": linestring_coordinates(row), - "res": row["resistance"], - "layers": row["layer"], - "order": row["order"], - "label": row["label"], - } 
- ) - return kwargslist - - -def implinedoublet( - spec: ElementSpecification, time_start: float -) -> List[Dict[str, Any]]: - kwargslist = [] - for row in spec.dataframe.to_dict("records"): - row = filter_nan(row) - kwargslist.append( - { - "xy": linestring_coordinates(row), - "res": "imp", - "layers": row["layer"], - "order": row["order"], - "label": row["label"], - } - ) - return kwargslist - - -def circareasink(spec: TransientElementSpecification, time_start: float) -> None: - """ - Parameters - ---------- - dataframe: geopandas.GeoDataFrame - - Returns - ------- - None - """ - df = transient_dataframe(spec) - kwargslist = [] - for row in spec.steady_spec.dataframe.to_dict("records"): - row = filter_nan(row) - x, y = np.array(row["geometry"].centroid.coords)[0] - coords = np.array(row["geometry"].exterior.coords) - x0, y0 = coords[0] - radius = np.sqrt((x0 - x) ** 2 + (y0 - y) ** 2) - kwargslist.append( - { - "xc": x, - "yc": y, - "R": radius, - "tsandN": transient_input(df, row, "rate", time_start), - } - ) - return kwargslist - - -def head_observations( - model: TimModel, - reference_date: pd.Timestamp, - observations: Dict, -) -> gpd.GeoDataFrame: - # We'll duplicate all values in time, except head which is unique per time. 
- refdate = pd.to_datetime(reference_date) - geodataframes = {} - for name, kwargs_collections in observations.items(): - xx = [] - yy = [] - labels = [] - heads = [] - starts = [] - ends = [] - observation_ids = [] - for kwargs in kwargs_collections: - x = kwargs["x"] - y = kwargs["y"] - t = kwargs["t"] - end = refdate + pd.to_timedelta(t, "D") - start = np.insert(end[:-1], 0, refdate) - - starts.append(start) - ends.append(end) - heads.append(model.head(x=x, y=y, t=t)) - xx.append(np.repeat(x, len(t))) - yy.append(np.repeat(y, len(t))) - labels.append(np.repeat(kwargs["label"], len(t))) - observation_ids.append(np.repeat(kwargs["observation_id"], len(t))) - - d = { - "geometry": gpd.points_from_xy(np.concatenate(xx), np.concatenate(yy)), - "datetime_start": np.concatenate(starts), - "datetime_end": np.concatenate(ends), - "label": np.concatenate(labels), - "observation_id": np.concatenate(observation_ids), - } - for i, layerhead in enumerate(np.hstack(heads)): - d[f"head_layer{i}"] = layerhead - - geodataframes[name] = gpd.GeoDataFrame(d) - - return geodataframes - - -def headgrid( - model: TimModel, - extent: Tuple[float], - cellsize: float, - times: FloatArray, - reference_date: pd.Timestamp, -) -> xr.DataArray: - xmin, xmax, ymin, ymax = extent - x = np.arange(xmin, xmax, cellsize) + 0.5 * cellsize - # In geospatial rasters, y is DECREASING with row number - y = np.arange(ymax, ymin, -cellsize) - 0.5 * cellsize - nlayer = model.aq.find_aquifer_data(x[0], y[0]).naq - layer = [i for i in range(nlayer)] - head = np.empty((nlayer, times.size, y.size, x.size), dtype=np.float64) - for i in tqdm.tqdm(range(y.size)): - for j in range(x.size): - head[:, :, i, j] = model.head(x[j], y[i], times, layer) - - time = pd.to_datetime(reference_date) + pd.to_timedelta(times, "D") - return xr.DataArray( - data=head, - name="head", - coords={"layer": layer, "time": time, "y": y, "x": x}, - dims=("layer", "time", "y", "x"), - ) - - -# Map the names of the elements to their 
constructors -if ttim is not None: - MAPPING = { - "Circular Area Sink": (circareasink, ttim.CircAreaSink), - "Well": (well, ttim.Well), - "Head Well": (headwell, ttim.HeadWell), - "Head Line Sink": (headlinesink, ttim.HeadLineSinkString), - "Line Sink Ditch": (linesinkditch, ttim.LineSinkDitchString), - "Leaky Line Doublet": (leakylinedoublet, ttim.LeakyLineDoubletString), - "Impermeable Line Doublet": (implinedoublet, ttim.LeakyLineDoubletString), - "Observation": (observation, None), - } - - -def initialize_model(spec: TtimModelSpecification, timml_model) -> TimModel: - """ - Initialize a Ttim analytic model based on the data in a geopackage. - - Parameters - ---------- - spec: ModelSpecification - Named tuple with the layer name of the aquifer and a dictionary of - other element names to its construction function. - - Returns - ------- - timml.Model - - Examples - -------- - - >>> import gistim - >>> path = "my-model.gpkg" - >>> spec = gistim.model_specification(path) - >>> model = gistim.initialize_model(spec) - >>> model.solve() - - """ - model = ttim.ModelMaq( - **ttim_model(spec.aquifer, spec.temporal_settings, timml_model) - ) - elements = {} - observations = defaultdict(list) - for name, element_spec in spec.elements.items(): - elementtype = element_spec.elementtype - if (not element_spec.active) or (elementtype not in MAPPING): - continue - - f_to_kwargs, element = MAPPING[elementtype] - for i, kwargs in enumerate(f_to_kwargs(element_spec, model.tstart)): - if elementtype == "Observation": - observations[name].append(kwargs) - else: - kwargs["model"] = model - elements[f"{name}_{i}"] = element(**kwargs) - - return model, elements, observations - - -def convert_to_script(spec: TtimModelSpecification) -> str: - """ - Convert model specification to an equivalent Python script. 
- """ - modelkwargs = ttim_model(spec.aquifer, spec.temporal_settings, "model") - strkwargs = dict_to_kwargs_code(modelkwargs) - - observations = {} - strings = ["import ttim", f"ttim_model = ttim.ModelMaq({strkwargs})"] - for name, element_spec in spec.elements.items(): - elementtype = element_spec.elementtype - if elementtype not in MAPPING: - continue - - f_to_kwargs, element = MAPPING[elementtype] - for i, kwargs in enumerate(f_to_kwargs(element_spec, modelkwargs["tstart"])): - if elementtype == "Head Observation": - kwargs.pop("label") - kwargs.pop("observation_id") - kwargs = dict_to_kwargs_code(kwargs) - observations[f"ttim_observation_{sanitized(name)}_{i}"] = kwargs - else: - kwargs["model"] = "ttim_model" - kwargs = dict_to_kwargs_code(kwargs) - strings.append( - f"ttim_{sanitized(name)}_{i} = ttim.{element.__name__}({kwargs})" - ) - - strings.append("ttim_model.solve()") - - xg, yg = headgrid_code(spec.domain) - times = spec.output_times.tolist() - if len(times) > 0: - strings.append(f"ttim_head = ttim_model.headgrid(xg={xg}, yg={yg}, t={times})") - - # Add all the individual observation points - for name, kwargs in observations.items(): - strings.append(f"{name} = ttim_model.head({kwargs})") - - return black.format_str("\n".join(strings), mode=black.FileMode()) diff --git a/plugin/qgistim/core/tim_elements.py b/plugin/qgistim/core/tim_elements.py deleted file mode 100644 index 79f2efd..0000000 --- a/plugin/qgistim/core/tim_elements.py +++ /dev/null @@ -1,956 +0,0 @@ -""" -This module contains the classes to represent the TimML and TTim input layers. - -The classes specify: - -* The (unabbreviated) name -* The type of geometry (No geometry, point, linestring, polygon) -* The required attributes of the attribute table - -They contain logic for setting up: - -* Simple input, requiring a single table, e.g. 
Uniform Flow or Constant -* Transient input, requiring two tables, one with geometry and steady-state - properties, and one containing optional time series input e.g. Well, Head Line - Sink. -* Associated input, requiring two tables, one with geometry and one with input - for layers, e.g. Polygon Inhomogeneity or Building Pit. - -Each element is (optionally) represented in multiple places: - -* It always lives in a GeoPackage. -* While a geopackage is active within plugin, it is always represented in a - Dataset Tree: the Dataset Tree provides a direct look at the state of the - GeoPackage. In this tree, steady and transient input are on the same row. - Associated input is, to potentially enable transient associated data later - on (like a building pit with changing head top boundary). -* It can be added to the Layers Panel in QGIS. This enables a user to visualize - and edit its data. - -Some elements require specific rendering in QGIS (e.g. no fill polygons), which -are supplied by the `.renderer()` staticmethod. - -The coupling of separate tables (geometry table and time series table) is only -explicit in the Dataset Tree. The only way of knowing that tables are -associated with each other is by comparing names. Names must therefore be -unique within a group of the same type of elements. - -Rendering: - -* Fixed discharge (area sink, well, ditch): green -* Fixed head (head well, semi-confined top, head line): blue -* Inhomogeneity: grey -* Constant: star -* Observations: triangle -* Line Doublets: red -* Polygons: Line and Fill same color, Fill color 15% opacity. - -Nota bene: the order of the columns is important for hiding and showing the -transient columns. QGIS has a bug which causes it to show the wrong column if -hidden columns appear before shown ones. This only affects the attribute table -when it has no features. 
-""" - -import abc -import re -from collections import defaultdict -from functools import partial -from typing import Any, Dict, List, Tuple - -from PyQt5.QtCore import QVariant -from PyQt5.QtWidgets import ( - QDialog, - QHBoxLayout, - QLabel, - QLineEdit, - QPushButton, - QVBoxLayout, -) -from qgis.core import ( - QgsDefaultValue, - QgsFeature, - QgsField, - QgsFillSymbol, - QgsGeometry, - QgsLineSymbol, - QgsMarkerSymbol, - QgsPointXY, - QgsSingleSymbolRenderer, - QgsVectorLayer, -) -from qgistim.core import geopackage - -RED = "215,48,39,255" -GREEN = "51,160,44,255" -BLUE = "31,120,180,255" -GREY = "135,135,135,255" -BLACK = "0,0,0,255" -LIGHT_BLUE = "166,206,227,255" - -OPACITY = 51 # 20% -TRANSPARENT = "255,255,255,0" -TRANSPARENT_RED = f"215,48,39,{OPACITY}" -TRANSPARENT_GREEN = f"51,160,44,{OPACITY}" -TRANSPARENT_BLUE = f"31,120,180,{OPACITY}" -TRANSPARENT_GREY = f"135,135,135,{OPACITY}" - - -class NameDialog(QDialog): - def __init__(self, parent=None): - super().__init__(parent) - self.name_line_edit = QLineEdit() - self.ok_button = QPushButton("OK") - self.cancel_button = QPushButton("Cancel") - self.ok_button.clicked.connect(self.accept) - self.cancel_button.clicked.connect(self.reject) - first_row = QHBoxLayout() - first_row.addWidget(QLabel("Layer name")) - first_row.addWidget(self.name_line_edit) - second_row = QHBoxLayout() - second_row.addStretch() - second_row.addWidget(self.ok_button) - second_row.addWidget(self.cancel_button) - layout = QVBoxLayout() - layout.addLayout(first_row) - layout.addLayout(second_row) - self.setLayout(layout) - - -class Element: - """ - Abstract base class for "ordinary" timml elements. 
- """ - - element_type = None - geometry_type = None - timml_attributes = () - ttim_attributes = () - assoc_attributes = () - transient_columns = () - timml_defaults = {} - ttim_defaults = {} - assoc_defaults = {} - - def _initialize_default(self, path, name): - self.name = name - self.path = path - self.timml_name = None - self.ttim_name = None - self.assoc_name = None - self.timml_layer = None - self.ttim_layer = None - self.assoc_layer = None - self.item = None - - def __init__(self, path: str, name: str): - self._initialize_default(path, name) - self.timml_name = f"timml {self.element_type}:{name}" - - @classmethod - def dialog(cls, path: str, crs: Any, iface: Any, names: List[str]): - dialog = NameDialog() - dialog.show() - ok = dialog.exec_() - if not ok: - return - - name = dialog.name_line_edit.text() - if name in names: - raise ValueError(f"Name already exists in geopackage: {name}") - - instance = cls(path, name) - instance.create_layers(crs) - return instance - - def create_layer( - self, crs: Any, geometry_type: str, name: str, attributes: List - ) -> QgsVectorLayer: - layer = QgsVectorLayer(geometry_type, name, "memory") - provider = layer.dataProvider() - provider.addAttributes(attributes) - layer.updateFields() - layer.setCrs(crs) - return layer - - def create_timml_layer(self, crs: Any): - self.timml_layer = self.create_layer( - crs=crs, - geometry_type=self.geometry_type, - name=self.timml_name, - attributes=self.timml_attributes, - ) - - def create_ttim_layer(self, crs: Any): - pass - - def create_assoc_layer(self, crs: Any): - pass - - def create_layers(self, crs: Any): - self.create_timml_layer(crs) - self.create_ttim_layer(crs) - self.create_assoc_layer(crs) - - def set_defaults(self): - for layer, defaults in zip( - (self.timml_layer, self.ttim_layer, self.assoc_layer), - (self.timml_defaults, self.ttim_defaults, self.assoc_defaults), - ): - if layer is None: - continue - fields = layer.fields() - for name, definition in defaults.items(): - 
index = fields.indexFromName(name) - layer.setDefaultValueDefinition(index, definition) - return - - @staticmethod - def marker_renderer(**kwargs): - symbol = QgsMarkerSymbol.createSimple(kwargs) - return QgsSingleSymbolRenderer(symbol) - - @staticmethod - def line_renderer(**kwargs): - symbol = QgsLineSymbol.createSimple(kwargs) - return QgsSingleSymbolRenderer(symbol) - - @staticmethod - def polygon_renderer(**kwargs): - symbol = QgsFillSymbol.createSimple(kwargs) - return QgsSingleSymbolRenderer(symbol) - - @classmethod - def renderer(cls): - return None - - def timml_layer_from_geopackage(self) -> QgsVectorLayer: - self.timml_layer = QgsVectorLayer( - f"{self.path}|layername={self.timml_name}", self.timml_name - ) - - def ttim_layer_from_geopackage(self): - return - - def assoc_layer_from_geopackage(self): - return - - def load_layers_from_geopackage(self) -> None: - self.timml_layer_from_geopackage() - self.ttim_layer_from_geopackage() - self.assoc_layer_from_geopackage() - self.set_defaults() - return - - def write(self): - self.timml_layer = geopackage.write_layer( - self.path, self.timml_layer, self.timml_name - ) - self.set_defaults() - - def remove_from_geopackage(self): - geopackage.remove_layer(self.path, self.timml_name) - - def on_transient_changed(self, transient: bool): - if len(self.transient_columns) == 0: - return - - config = self.timml_layer.attributeTableConfig() - columns = config.columns() - - for i, column in enumerate(columns): - if column.name in self.transient_columns: - config.setColumnHidden(i, not transient) - - self.timml_layer.setAttributeTableConfig(config) - return - - -class TransientElement(Element): - """ - Abstract base class for transient (ttim) elements. 
- """ - - def __init__(self, path: str, name: str): - self._initialize_default(path, name) - self.timml_name = f"timml {self.element_type}:{name}" - self.ttim_name = f"ttim {self.element_type}:{name}" - - def create_ttim_layer(self, crs: Any): - self.ttim_layer = self.create_layer( - crs=crs, - geometry_type="No Geometry", - name=self.ttim_name, - attributes=self.ttim_attributes, - ) - - def ttim_layer_from_geopackage(self): - self.ttim_layer = QgsVectorLayer( - f"{self.path}|layername={self.ttim_name}", - self.ttim_name, - ) - - def write(self): - self.timml_layer = geopackage.write_layer( - self.path, self.timml_layer, self.timml_name - ) - self.ttim_layer = geopackage.write_layer( - self.path, self.ttim_layer, self.ttim_name - ) - self.set_defaults() - - def remove_from_geopackage(self): - geopackage.remove_layer(self.path, self.timml_name) - geopackage.remove_layer(self.path, self.ttim_name) - - -class AssociatedElement(Element): - """ - Abstract class for elements that require associated tables such as - Inhomogenities. 
- """ - - def __init__(self, path: str, name: str): - self._initialize_default(path, name) - self.timml_name = f"timml {self.element_type}:{name}" - self.assoc_name = f"timml {self.element_type} Properties:{name}" - - def create_assoc_layer(self, crs: Any): - self.assoc_layer = self.create_layer( - crs=crs, - geometry_type="No Geometry", - name=self.assoc_name, - attributes=self.assoc_attributes, - ) - - def assoc_layer_from_geopackage(self): - self.assoc_layer = QgsVectorLayer( - f"{self.path}|layername={self.assoc_name}", - self.assoc_name, - ) - - def write(self): - self.timml_layer = geopackage.write_layer( - self.path, self.timml_layer, self.timml_name - ) - self.assoc_layer = geopackage.write_layer( - self.path, self.assoc_layer, self.assoc_name - ) - self.set_defaults() - - def remove_from_geopackage(self): - geopackage.remove_layer(self.path, self.timml_name) - geopackage.remove_layer(self.path, self.assoc_name) - - -class Domain(TransientElement): - element_type = "Domain" - geometry_type = "Polygon" - ttim_attributes = (QgsField("time", QVariant.Double),) - - def __init__(self, path: str, name: str): - self._initialize_default(path, name) - self.timml_name = f"timml {self.element_type}:Domain" - self.ttim_name = "ttim Computation Times:Domain" - - @classmethod - def renderer(cls) -> QgsSingleSymbolRenderer: - """ - Results in transparent fill, with a medium thick black border line. 
- """ - return cls.polygon_renderer( - color="255,0,0,0", color_border=BLACK, width_border="0.75" - ) - - def remove_from_geopackage(self): - pass - - def update_extent(self, iface: Any) -> Tuple[float, float]: - provider = self.timml_layer.dataProvider() - provider.truncate() # removes all features - canvas = iface.mapCanvas() - extent = canvas.extent() - xmin = extent.xMinimum() - ymin = extent.yMinimum() - xmax = extent.xMaximum() - ymax = extent.yMaximum() - points = [ - QgsPointXY(xmin, ymax), - QgsPointXY(xmax, ymax), - QgsPointXY(xmax, ymin), - QgsPointXY(xmin, ymin), - ] - feature = QgsFeature() - feature.setGeometry(QgsGeometry.fromPolygonXY([points])) - provider.addFeatures([feature]) - canvas.refresh() - return ymax, ymin - - -class Aquifer(TransientElement): - element_type = "Aquifer" - geometry_type = "No Geometry" - timml_attributes = [ - QgsField("layer", QVariant.Int), - QgsField("aquifer_top", QVariant.Double), - QgsField("aquifer_bottom", QVariant.Double), - QgsField("aquitard_c", QVariant.Double), - QgsField("aquifer_k", QVariant.Double), - QgsField("semiconf_top", QVariant.Double), - QgsField("semiconf_head", QVariant.Double), - QgsField("aquitard_s", QVariant.Double), - QgsField("aquifer_s", QVariant.Double), - QgsField("aquitard_npor", QVariant.Double), - QgsField("aquifer_npor", QVariant.Double), - ] - ttim_attributes = ( - QgsField("time_min", QVariant.Double), - QgsField("time_max", QVariant.Double), - QgsField("time_start", QVariant.Double), - QgsField("stehfest_M", QVariant.Int), - QgsField("reference_date", QVariant.DateTime), - ) - ttim_defaults = { - "time_min": QgsDefaultValue("0.01"), - "time_max": QgsDefaultValue("10.0"), - "time_start": QgsDefaultValue("0.0"), - "stehfest_M": QgsDefaultValue("10"), - } - transient_columns = ( - "aquitard_s", - "aquifer_s", - "aquitard_npor", - "aquifer_npor", - ) - - def __init__(self, path: str, name: str): - self._initialize_default(path, name) - self.timml_name = f"timml 
{self.element_type}:Aquifer" - self.ttim_name = "ttim Temporal Settings:Aquifer" - - def write(self): - self.timml_layer = geopackage.write_layer( - self.path, self.timml_layer, self.timml_name, newfile=True - ) - self.ttim_layer = geopackage.write_layer( - self.path, self.ttim_layer, self.ttim_name - ) - self.set_defaults() - - def remove_from_geopackage(self): - """This element may not be removed.""" - return - - -class UniformFlow(Element): - element_type = "Uniform Flow" - geometry_type = "No geometry" - timml_attributes = ( - QgsField("slope", QVariant.Double), - QgsField("angle", QVariant.Double), - QgsField("label", QVariant.String), - ) - - -class Constant(Element): - element_type = "Constant" - geometry_type = "Point" - timml_attributes = ( - QgsField("head", QVariant.Double), - QgsField("layer", QVariant.Int), - QgsField("label", QVariant.String), - ) - - @classmethod - def renderer(cls) -> QgsSingleSymbolRenderer: - return cls.marker_renderer(color=RED, name="star", size="5") - - -class HeadObservation(TransientElement): - element_type = "Head Observation" - geometry_type = "Point" - timml_attributes = ( - QgsField("label", QVariant.String), - QgsField("timeseries_id", QVariant.Int), - ) - ttim_attributes = ( - QgsField("timeseries_id", QVariant.Int), - QgsField("time", QVariant.Double), - ) - timml_defaults = { - "timeseries_id": QgsDefaultValue("1"), - } - ttim_defaults = { - "timeseries_id": QgsDefaultValue("1"), - } - transient_columns = ("timeseries_id",) - - @classmethod - def renderer(cls) -> QgsSingleSymbolRenderer: - return cls.marker_renderer(color=LIGHT_BLUE, name="triangle", size="3") - - -class Well(TransientElement): - element_type = "Well" - geometry_type = "Point" - timml_attributes = ( - QgsField("discharge", QVariant.Double), - QgsField("radius", QVariant.Double), - QgsField("resistance", QVariant.Double), - QgsField("layer", QVariant.Int), - QgsField("label", QVariant.String), - QgsField("time_start", QVariant.Double), - 
QgsField("time_end", QVariant.Double), - QgsField("discharge_transient", QVariant.Double), - QgsField("caisson_radius", QVariant.Double), - QgsField("slug", QVariant.Bool), - QgsField("timeseries_id", QVariant.Int), - ) - ttim_attributes = ( - QgsField("timeseries_id", QVariant.Int), - QgsField("time_start", QVariant.Double), - QgsField("discharge", QVariant.Double), - ) - timml_defaults = { - "radius": QgsDefaultValue("0.1"), - "resistance": QgsDefaultValue("0.0"), - "caisson_radius": QgsDefaultValue("0.0"), - "slug": QgsDefaultValue("True"), - } - transient_columns = ( - "time_start", - "time_end", - "discharge_transient", - "caisson_radius", - "slug", - "timeseries_id", - ) - - @classmethod - def renderer(cls) -> QgsSingleSymbolRenderer: - return cls.marker_renderer(color=GREEN, size="3") - - -class HeadWell(TransientElement): - element_type = "Head Well" - geometry_type = "Point" - timml_attributes = ( - QgsField("head", QVariant.Double), - QgsField("radius", QVariant.Double), - QgsField("resistance", QVariant.Double), - QgsField("layer", QVariant.Int), - QgsField("label", QVariant.String), - QgsField("time_start", QVariant.Double), - QgsField("time_end", QVariant.Double), - QgsField("head_transient", QVariant.Double), - QgsField("timeseries_id", QVariant.Int), - ) - ttim_attributes = ( - QgsField("timeseries_id", QVariant.Int), - QgsField("time_start", QVariant.Double), - QgsField("head", QVariant.Double), - ) - timml_defaults = { - "radius": QgsDefaultValue("0.1"), - "resistance": QgsDefaultValue("0.0"), - } - transient_columns = ( - "time_start", - "time_end", - "head_transient", - "timeseries_id", - ) - - @classmethod - def renderer(cls) -> QgsSingleSymbolRenderer: - return cls.marker_renderer(color=BLUE, size="3") - - -class HeadLineSink(TransientElement): - element_type = "Head Line Sink" - geometry_type = "Linestring" - timml_attributes = ( - QgsField("head", QVariant.Double), - QgsField("resistance", QVariant.Double), - QgsField("width", 
QVariant.Double), - QgsField("order", QVariant.Int), - QgsField("layer", QVariant.Int), - QgsField("label", QVariant.String), - QgsField("time_start", QVariant.Double), - QgsField("time_end", QVariant.Double), - QgsField("head_transient", QVariant.Double), - QgsField("timeseries_id", QVariant.Int), - ) - ttim_attributes = ( - QgsField("timeseries_id", QVariant.Int), - QgsField("time_start", QVariant.Double), - QgsField("head", QVariant.Double), - ) - timml_defaults = { - "order": QgsDefaultValue("4"), - } - transient_columns = ( - "time_start", - "time_end", - "head_transient", - "timeseries_id", - ) - - @classmethod - def renderer(cls) -> QgsSingleSymbolRenderer: - return cls.line_renderer(color=BLUE, width="0.75") - - -class LineSinkDitch(TransientElement): - element_type = "Line Sink Ditch" - geometry_type = "Linestring" - timml_attributes = ( - QgsField("discharge", QVariant.Double), - QgsField("resistance", QVariant.Double), - QgsField("width", QVariant.Double), - QgsField("order", QVariant.Int), - QgsField("layer", QVariant.Int), - QgsField("label", QVariant.String), - QgsField("time_start", QVariant.Double), - QgsField("time_end", QVariant.Double), - QgsField("discharge_transient", QVariant.Double), - QgsField("timeseries_id", QVariant.Int), - ) - ttim_attributes = ( - QgsField("timeseries_id", QVariant.Int), - QgsField("time_start", QVariant.Double), - QgsField("discharge", QVariant.Double), - ) - timml_defaults = { - "order": QgsDefaultValue("4"), - } - transient_columns = ( - "time_start", - "time_end", - "discharge_transient", - "timeseries_id", - ) - - @classmethod - def renderer(cls) -> QgsSingleSymbolRenderer: - return cls.line_renderer(color=GREEN, width="0.75") - - -class ImpermeableLineDoublet(Element): - element_type = "Impermeable Line Doublet" - geometry_type = "Linestring" - timml_attributes = ( - QgsField("order", QVariant.Int), - QgsField("layer", QVariant.Int), - QgsField("label", QVariant.String), - ) - timml_defaults = { - "order": 
QgsDefaultValue("4"), - } - - @classmethod - def renderer(cls) -> QgsSingleSymbolRenderer: - return cls.line_renderer(color=RED, width="0.75") - - -class LeakyLineDoublet(Element): - element_type = "Leaky Line Doublet" - geometry_type = "Linestring" - timml_attributes = ( - QgsField("resistance", QVariant.Double), - QgsField("order", QVariant.Int), - QgsField("layer", QVariant.Int), - QgsField("label", QVariant.String), - ) - timml_defaults = { - "order": QgsDefaultValue("4"), - } - - @classmethod - def renderer(cls) -> QgsSingleSymbolRenderer: - return cls.line_renderer(color=RED, width="0.75", outline_style="dash") - - -class CircularAreaSink(TransientElement): - element_type = "Circular Area Sink" - geometry_type = "Polygon" - timml_attributes = ( - QgsField("rate", QVariant.Double), - QgsField("layer", QVariant.Int), - QgsField("label", QVariant.String), - QgsField("time_start", QVariant.Double), - QgsField("time_end", QVariant.Double), - QgsField("rate_transient", QVariant.Double), - QgsField("timeseries_id", QVariant.Int), - ) - ttim_attributes = ( - QgsField("timeseries_id", QVariant.Int), - QgsField("time_start", QVariant.Double), - QgsField("rate", QVariant.Double), - ) - transient_columns = ( - "time_start", - "time_end", - "rate_transient", - "timeseries_id", - ) - - @classmethod - def renderer(cls): - return cls.polygon_renderer( - color=TRANSPARENT_GREEN, color_border=GREEN, width_border="0.75" - ) - - -class PolygonAreaSink(Element): - element_type = "Polygon Area Sink" - geometry_type = "Polygon" - timml_attributes = ( - QgsField("rate", QVariant.Double), - QgsField("order", QVariant.Int), - QgsField("ndegrees", QVariant.Int), - ) - timml_defaults = { - "order": QgsDefaultValue("4"), - "ndegrees": QgsDefaultValue("6"), - } - - @classmethod - def renderer(cls) -> QgsSingleSymbolRenderer: - return cls.polygon_renderer( - color=TRANSPARENT_GREEN, color_border=GREEN, width_border="0.75" - ) - - -class PolygonSemiConfinedTop(Element): - element_type = 
"Polygon Semi-Confined Top" - geometry_type = "Polygon" - timml_attributes = ( - QgsField("aquitard_c", QVariant.Double), - QgsField("semiconf_top", QVariant.Double), - QgsField("semiconf_head", QVariant.Double), - QgsField("order", QVariant.Int), - QgsField("ndegrees", QVariant.Int), - ) - timml_defaults = { - "order": QgsDefaultValue("4"), - "ndegrees": QgsDefaultValue("6"), - } - - @classmethod - def renderer(cls) -> QgsSingleSymbolRenderer: - return cls.polygon_renderer( - color=TRANSPARENT_BLUE, color_border=BLUE, width_border="0.75" - ) - - -class PolygonInhomogeneity(AssociatedElement): - element_type = "Polygon Inhomogeneity" - geometry_type = "Polygon" - timml_attributes = ( - QgsField("inhomogeneity_id", QVariant.Int), - QgsField("order", QVariant.Int), - QgsField("ndegrees", QVariant.Int), - ) - assoc_attributes = [ - QgsField("inhomogeneity_id", QVariant.Int), - QgsField("layer", QVariant.Int), - QgsField("aquifer_top", QVariant.Double), - QgsField("aquifer_bottom", QVariant.Double), - QgsField("aquitard_c", QVariant.Double), - QgsField("aquifer_k", QVariant.Double), - QgsField("semiconf_top", QVariant.Double), - QgsField("semiconf_head", QVariant.Double), - QgsField("rate", QVariant.Double), - QgsField("aquitard_s", QVariant.Double), - QgsField("aquifer_s", QVariant.Double), - QgsField("aquitard_npor", QVariant.Double), - QgsField("aquifer_npor", QVariant.Double), - ] - timml_defaults = { - "order": QgsDefaultValue("4"), - "ndegrees": QgsDefaultValue("6"), - "inhomogeneity_id": QgsDefaultValue("1"), - } - assoc_defaults = { - "inhomogeneity_id": QgsDefaultValue("1"), - } - transient_columns = ( - "aquitard_s", - "aquifer_s", - "aquitard_npor", - "aquifer_npor", - ) - - @classmethod - def renderer(cls) -> QgsSingleSymbolRenderer: - return cls.polygon_renderer( - color=TRANSPARENT_GREY, color_border=GREY, width_border="0.75" - ) - - def on_transient_changed(self, transient: bool): - config = self.assoc_layer.attributeTableConfig() - columns = 
config.columns() - - for i, column in enumerate(columns): - if column.name in self.transient_columns: - config.setColumnHidden(i, not transient) - - self.assoc_layer.setAttributeTableConfig(config) - return - - -class AbstractBuildingPit(AssociatedElement, abc.ABC): - geometry_type = "Polygon" - timml_attributes = ( - QgsField("inhomogeneity_id", QVariant.Int), - QgsField("order", QVariant.Int), - QgsField("ndegrees", QVariant.Int), - QgsField("layer", QVariant.Int), - ) - timml_defaults = { - "inhomogeneity_id": QgsDefaultValue("1"), - "order": QgsDefaultValue("4"), - "ndegrees": QgsDefaultValue("6"), - } - assoc_defaults = { - "inhomogeneity_id": QgsDefaultValue("1"), - } - transient_columns = ( - "aquitard_s", - "aquifer_s", - "aquitard_npor", - "aquifer_npor", - ) - - def on_transient_changed(self, transient: bool): - config = self.assoc_layer.attributeTableConfig() - columns = config.columns() - - for i, column in enumerate(columns): - if column.name in self.transient_columns: - config.setColumnHidden(i, not transient) - - self.assoc_layer.setAttributeTableConfig(config) - return - - -class BuildingPit(AbstractBuildingPit): - element_type = "Building Pit" - assoc_attributes = [ - QgsField("inhomogeneity_id", QVariant.Int), - QgsField("layer", QVariant.Int), - QgsField("aquifer_top", QVariant.Double), - QgsField("aquifer_bottom", QVariant.Double), - QgsField("aquitard_c", QVariant.Double), - QgsField("aquifer_k", QVariant.Double), - QgsField("semiconf_top", QVariant.Double), - QgsField("semiconf_head", QVariant.Double), - ] - - @classmethod - def renderer(cls) -> QgsSingleSymbolRenderer: - return cls.polygon_renderer( - color=TRANSPARENT_RED, color_border=RED, width_border="0.75" - ) - - -class LeakyBuildingPit(AbstractBuildingPit): - element_type = "Leaky Building Pit" - assoc_attributes = [ - QgsField("inhomogeneity_id", QVariant.Int), - QgsField("layer", QVariant.Int), - QgsField("aquifer_top", QVariant.Double), - QgsField("aquifer_bottom", QVariant.Double), 
- QgsField("aquitard_c", QVariant.Double), - QgsField("aquifer_k", QVariant.Double), - QgsField("resistance", QVariant.Double), - QgsField("semiconf_top", QVariant.Double), - QgsField("semiconf_head", QVariant.Double), - ] - - @classmethod - def renderer(cls) -> QgsSingleSymbolRenderer: - return cls.polygon_renderer( - color=TRANSPARENT_RED, - color_border=RED, - width_border="0.75", - outline_style="dash", - ) - - -ELEMENTS = { - element.element_type: element - for element in ( - Aquifer, - Domain, - Constant, - UniformFlow, - Well, - HeadWell, - HeadLineSink, - LineSinkDitch, - CircularAreaSink, - ImpermeableLineDoublet, - LeakyLineDoublet, - PolygonAreaSink, - PolygonSemiConfinedTop, - PolygonInhomogeneity, - BuildingPit, - LeakyBuildingPit, - HeadObservation, - ) -} - - -def parse_name(layername: str) -> Tuple[str, str, str]: - """ - Based on the layer name find out: - - * whether it's a timml or ttim element; - * which element type it is; - * what the user provided name is. - - For example: - parse_name("timml Headwell: drainage") -> ("timml", "Head Well", "drainage") - - This function can also be found in gistim.common - """ - prefix, name = layername.split(":") - element_type = re.split("timml |ttim ", prefix)[1] - mapping = { - "Computation Times": "Domain", - "Temporal Settings": "Aquifer", - "Polygon Inhomogeneity Properties": "Polygon Inhomogeneity", - "Building Pit Properties": "Building Pit", - "Leaky Building Pit Properties": "Building Pit", - } - element_type = mapping.get(element_type, element_type) - if "timml" in prefix: - if "Properties" in prefix: - tim_type = "timml_assoc" - else: - tim_type = "timml" - elif "ttim" in prefix: - tim_type = "ttim" - else: - raise ValueError("Neither timml nor ttim in layername") - return tim_type, element_type, name - - -def load_elements_from_geopackage(path: str) -> List[Element]: - # List the names in the geopackage - gpkg_names = geopackage.layers(path) - - # Group them on the basis of name - dd = defaultdict 
- grouped_names = dd(partial(dd, partial(dd, list))) - for layername in gpkg_names: - tim_type, element_type, name = parse_name(layername) - grouped_names[element_type][name][tim_type] = layername - - elements = [] - for element_type, group in grouped_names.items(): - for name in group: - elements.append(ELEMENTS[element_type](path, name)) - - return elements From 0e47ed2a31d0cd4970eb45624872409fb0901d13 Mon Sep 17 00:00:00 2001 From: Huite Bootsma Date: Thu, 26 Oct 2023 13:48:05 +0200 Subject: [PATCH 33/41] Readd TTim support. * Move netcdf utils into separate module * Rename extraction to_dict -> table_to_records * Add process_ttim_row on elements --- gistim/compute.py | 305 ++++++++++-------- gistim/netcdf.py | 47 +++ plugin/qgistim/core/elements/aquifer.py | 15 +- .../core/elements/circular_area_sink.py | 17 +- plugin/qgistim/core/elements/domain.py | 9 +- plugin/qgistim/core/elements/element.py | 79 +++-- .../qgistim/core/elements/head_line_sink.py | 27 +- plugin/qgistim/core/elements/headwell.py | 16 + .../core/elements/impermeable_line_doublet.py | 4 +- .../qgistim/core/elements/line_sink_ditch.py | 10 + plugin/qgistim/core/elements/observation.py | 9 + plugin/qgistim/core/elements/well.py | 41 +-- plugin/qgistim/core/extractor.py | 20 +- plugin/qgistim/core/formatting.py | 241 ++++++++++---- plugin/qgistim/widgets/dataset_widget.py | 64 ++-- 15 files changed, 604 insertions(+), 300 deletions(-) create mode 100644 gistim/netcdf.py diff --git a/gistim/compute.py b/gistim/compute.py index ddc8be8..3220bbc 100644 --- a/gistim/compute.py +++ b/gistim/compute.py @@ -1,7 +1,8 @@ import json import pathlib from collections import defaultdict -from typing import Any, Dict, Union +from functools import singledispatch +from typing import Any, Dict, List, Union import numpy as np import pandas as pd @@ -11,7 +12,7 @@ import xarray as xr from gistim.geopackage import CoordinateReferenceSystem, write_geopackage -from gistim.ugrid import to_ugrid2d +from gistim.netcdf 
import write_raster, write_ugrid TIMML_MAPPING = { "Constant": timml.Constant, @@ -27,92 +28,55 @@ "BuildingPit": timml.BuildingPit, "LeakyBuildingPit": timml.LeakyBuildingPit, } +TTIM_MAPPING = { + "CircAreaSink": ttim.CircAreaSink, + "Well": ttim.Well, + "HeadWell": ttim.HeadWell, + "HeadLineSinkString": ttim.HeadLineSinkString, + "LineSinkDitchString": ttim.LineSinkDitchString, + "LeakyLineDoubletString": ttim.LeakyLineDoubletString, +} -def write_raster( - head: xr.DataArray, - crs: CoordinateReferenceSystem, - outpath: Union[pathlib.Path, str], -) -> None: - # Write GDAL required metadata. - head["spatial_ref"] = xr.DataArray( - np.int32(0), attrs={"crs_wkt": crs.wkt, "spatial_ref": crs.wkt} - ) - head.attrs["grid_mapping"] = "spatial_ref" - head["x"].attrs = { - "axis": "X", - "long_name": "x coordinate of projection", - "standard_name": "projection_x_coordinate", - } - head["y"].attrs = { - "axis": "Y", - "long_name": "y coordinate of projection", - "standard_name": "projection_y_coordinate", - } - head.to_netcdf(outpath.with_suffix(".nc"), format="NETCDF3_CLASSIC") - return - - -def write_ugrid( - head: xr.DataArray, - crs: CoordinateReferenceSystem, - outpath: Union[pathlib.Path, str], -) -> None: - ugrid_head = to_ugrid2d(head) - # Write MDAL required metadata. 
- ugrid_head["projected_coordinate_system"] = xr.DataArray( - data=np.int32(0), - attrs={"epsg": np.int32(crs.srs_id)}, - ) - ugrid_head.to_netcdf(outpath.with_suffix(".ugrid.nc"), format="NETCDF3_CLASSIC") - return - - -def write_vector( - gdf_head, - crs: int, - outpath: Union[pathlib.Path, str], - layername: str, -) -> None: - if len(gdf_head.index) > 0: - gdf_head = gdf_head.set_crs(crs) - gdf_head.to_file( - outpath.with_suffix(".output.gpkg"), - driver="GPKG", - layer=layername, - ) - return - - -def initialize_timml(data): - aquifer = data.pop("ModelMaq") - timml_model = timml.ModelMaq(**aquifer) +def initialize_elements(model, mapping, data): elements = defaultdict(list) for name, entry in data.items(): - klass = TIMML_MAPPING[entry["type"]] + klass = mapping[entry["type"]] for kwargs in entry["data"]: - element = klass(model=timml_model, **kwargs) + element = klass(model=model, **kwargs) elements[name].append(element) - return timml_model, elements + return elements -def timml_head_observations( - model: timml.Model, observations: Dict -) -> Dict[str, pd.DataFrame]: - d = {"geometry": [], "label": []} - heads = [] - for kwargs in observations: - x = kwargs["x"] - y = kwargs["y"] - heads.append(model.head(x=x, y=y)) - d["geometry"].append({"type": "Point", "coordinates": [x, y]}) - d["label"].append(kwargs["label"]) - for i, layerhead in enumerate(np.vstack(heads).T): - d[f"head_layer{i}"] = layerhead - return pd.DataFrame(d) +def initialize_timml(data): + aquifer = data.pop("ModelMaq") + timml_model = timml.ModelMaq(**aquifer) + elements = initialize_elements(timml_model, TIMML_MAPPING, data) + return timml_model, elements -def timml_headgrid(model: timml.Model, xmin, xmax, ymin, ymax, spacing) -> xr.DataArray: +def initialize_ttim(data, timml_model): + aquifer = data.pop("ModelMaq") + ttim_model = ttim.ModelMaq(**aquifer, timmlmodel=timml_model) + elements = initialize_elements(ttim_model, TTIM_MAPPING, data) + return ttim_model, elements + + 
+@singledispatch +def headgrid(model, **kwargs): + raise TypeError("Expected timml or ttim model") + + +@headgrid.register +def _( + model: timml.Model, + xmin: float, + xmax: float, + ymin: float, + ymax: float, + spacing: float, + **_, +) -> xr.DataArray: """ Compute the headgrid of the TimML model, and store the results in an xarray DataArray with the appropriate dimensions. @@ -146,7 +110,94 @@ def timml_headgrid(model: timml.Model, xmin, xmax, ymin, ymax, spacing) -> xr.Da ) -def extract_discharges(elements, nlayers): +@headgrid.register +def _( + model: ttim.ModelMaq, + xmin: float, + xmax: float, + ymin: float, + ymax: float, + spacing: float, + reference_date: str, + time: List[float], +) -> xr.DataArray: + x = np.arange(xmin, xmax, spacing) + 0.5 * spacing + # In geospatial rasters, y is DECREASING with row number + y = np.arange(ymax, ymin, -spacing) - 0.5 * spacing + nlayer = model.aq.find_aquifer_data(x[0], y[0]).naq + layer = [i for i in range(nlayer)] + head = np.empty((nlayer, len(time), y.size, x.size), dtype=np.float64) + for i in tqdm.tqdm(range(y.size)): + for j in range(x.size): + head[:, :, i, j] = model.head(x[j], y[i], time, layer) + + time = pd.to_datetime(reference_date) + pd.to_timedelta(time, "D") + return xr.DataArray( + data=head, + name="head", + coords={"layer": layer, "time": time, "y": y, "x": x}, + dims=("layer", "time", "y", "x"), + ) + + +@singledispatch +def head_observations(model, observations): + raise TypeError("Expected timml or ttim model") + + +@head_observations.register +def _( + model: timml.Model, + observations: Dict, + **_, +) -> Dict[str, pd.DataFrame]: + d = {"geometry": [], "label": []} + heads = [] + for kwargs in observations: + x = kwargs["x"] + y = kwargs["y"] + heads.append(model.head(x=x, y=y)) + d["geometry"].append({"type": "Point", "coordinates": [x, y]}) + d["label"].append(kwargs["label"]) + for i, layerhead in enumerate(np.vstack(heads).T): + d[f"head_layer{i}"] = layerhead + return pd.DataFrame(d) + 
+ +@head_observations.register +def _( + model: ttim.ModelMaq, observations: Dict, reference_date: pd.Timestamp +) -> Dict[str, pd.DataFrame]: + d = { + "geometry": [], + "datetime_start": [], + "datetime_end": [], + "label": [], + "observation_id": [], + } + heads = [] + reference_date = pd.to_datetime(reference_date, utc=False) + for observation_id, kwargs in enumerate(observations): + x = kwargs["x"] + y = kwargs["y"] + t = kwargs["t"] + n_time = len(t) + datetime = reference_date + pd.to_timedelta([0] + t, "day") + d["geometry"].extend([{"type": "Point", "coordinates": [x, y]}] * n_time) + d["datetime_start"].extend(datetime[:-1]) + d["datetime_end"].extend(datetime[1:] - pd.to_timedelta(1, "minute")) + d["label"].extend([kwargs["label"]] * n_time) + d["observation_id"].extend([observation_id] * n_time) + heads.append(model.head(x=x, y=y, t=t)) + + for i, layerhead in enumerate(np.hstack(heads)): + d[f"head_layer{i}"] = layerhead + + df = pd.DataFrame(d) + return df + + +def extract_discharges(elements, nlayers, **_): tables = {} for layername, content in elements.items(): sample = content[0] @@ -187,25 +238,20 @@ def extract_discharges(elements, nlayers): return tables -def compute_steady( +def write_output( + model: Union[timml.Model, ttim.ModelMaq], + elements: Dict[str, Any], + data: Dict[str, Any], path: Union[pathlib.Path, str], -) -> None: - with open(path, "r") as f: - data = json.load(f) - - output_options = data.pop("output_options") - headgrid = data.pop("headgrid") - crs = CoordinateReferenceSystem(**data.pop("crs")) - observation_names = [ - key for key, value in data.items() if value.get("type") == "Head Observation" - ] - observations = {name: data.pop(name) for name in observation_names} - timml_model, elements = initialize_timml(data) - timml_model.solve() +): + crs = CoordinateReferenceSystem(**data["crs"]) + output_options = data["output_options"] + observations = data["observations"] + reference_date = 
pd.to_datetime(data.get("reference_date")) # Compute gridded head data and write to netCDF. if output_options["raster"] or output_options["mesh"]: - head = timml_headgrid(timml_model, **headgrid) + head = headgrid(model, **data["headgrid"], reference_date=reference_date) if output_options["raster"]: write_raster(head, crs, path) @@ -214,60 +260,57 @@ def compute_steady( # Compute observations and discharge, and write to geopackage. if output_options["discharge"]: - tables = extract_discharges(elements, timml_model.aq.nlayers) + tables = extract_discharges( + elements, model.aq.nlayers, reference_date=reference_date + ) else: tables = {} if output_options["head_observations"] and observations: for layername, content in observations.items(): - tables[layername] = timml_head_observations(timml_model, content["data"]) + tables[layername] = head_observations( + model, content["data"], reference_date=reference_date + ) write_geopackage(tables, crs, path) return -def compute_transient( - inpath: Union[pathlib.Path, str], - outpath: Union[pathlib.Path, str], - cellsize: float, - active_elements: Dict[str, bool], +def compute_steady( + path: Union[pathlib.Path, str], ) -> None: - inpath = pathlib.Path(inpath) - outpath = pathlib.Path(outpath) + with open(path, "r") as f: + data = json.load(f) - timml_spec, ttim_spec = gistim.model_specification(inpath, active_elements) - timml_model, _, observations = gistim.timml_elements.initialize_model(timml_spec) - ttim_model, _, observations = gistim.ttim_elements.initialize_model( - ttim_spec, timml_model - ) + timml_model, elements = initialize_timml(data["timml"]) timml_model.solve() - ttim_model.solve() - extent, crs = gistim.gridspec(inpath, cellsize) - refdate = ttim_spec.temporal_settings["reference_date"].iloc[0] - gdfs_obs = gistim.ttim_elements.head_observations(ttim_model, refdate, observations) - - # If no output times are specified, just compute the TimML steady-state - # heads. 
- if len(ttim_spec.output_times) > 0: - head = gistim.ttim_elements.headgrid( - ttim_model, - extent, - cellsize, - ttim_spec.output_times, - refdate, - ) - write_raster(head, crs, outpath) - write_ugrid(head, crs, outpath) - else: - head = gistim.timml_elements.headgrid(timml_model, extent, cellsize) + write_output( + timml_model, + elements, + data, + path, + ) + return - write_raster(head, crs, outpath) - write_ugrid(head, crs, outpath) - for name, gdf in gdfs_obs.items(): - write_vector(gdf, crs, outpath, layername=name) +def compute_transient( + path: Union[pathlib.Path, str], +) -> None: + with open(path, "r") as f: + data = json.load(f) + + timml_model, _ = initialize_timml(data["timml"]) + ttim_model, ttim_elements = initialize_ttim(data["ttim"], timml_model) + timml_model.solve() + ttim_model.solve() + write_output( + ttim_model, + ttim_elements, + data, + path, + ) return diff --git a/gistim/netcdf.py b/gistim/netcdf.py new file mode 100644 index 0000000..fe9d9e5 --- /dev/null +++ b/gistim/netcdf.py @@ -0,0 +1,47 @@ +import pathlib +from typing import Union + +import numpy as np +import xarray as xr + +from gistim.geopackage import CoordinateReferenceSystem +from gistim.ugrid import to_ugrid2d + + +def write_raster( + head: xr.DataArray, + crs: CoordinateReferenceSystem, + outpath: Union[pathlib.Path, str], +) -> None: + # Write GDAL required metadata. 
+ head["spatial_ref"] = xr.DataArray( + np.int32(0), attrs={"crs_wkt": crs.wkt, "spatial_ref": crs.wkt} + ) + head.attrs["grid_mapping"] = "spatial_ref" + head["x"].attrs = { + "axis": "X", + "long_name": "x coordinate of projection", + "standard_name": "projection_x_coordinate", + } + head["y"].attrs = { + "axis": "Y", + "long_name": "y coordinate of projection", + "standard_name": "projection_y_coordinate", + } + head.to_netcdf(outpath.with_suffix(".nc"), format="NETCDF3_CLASSIC") + return + + +def write_ugrid( + head: xr.DataArray, + crs: CoordinateReferenceSystem, + outpath: Union[pathlib.Path, str], +) -> None: + ugrid_head = to_ugrid2d(head) + # Write MDAL required metadata. + ugrid_head["projected_coordinate_system"] = xr.DataArray( + data=np.int32(0), + attrs={"epsg": np.int32(crs.srs_id)}, + ) + ugrid_head.to_netcdf(outpath.with_suffix(".ugrid.nc"), format="NETCDF3_CLASSIC") + return diff --git a/plugin/qgistim/core/elements/aquifer.py b/plugin/qgistim/core/elements/aquifer.py index e8ba1cc..e2501f7 100644 --- a/plugin/qgistim/core/elements/aquifer.py +++ b/plugin/qgistim/core/elements/aquifer.py @@ -1,3 +1,5 @@ +from typing import Any, Dict + from PyQt5.QtCore import QVariant from qgis.core import QgsDefaultValue, QgsField from qgistim.core import geopackage @@ -33,7 +35,9 @@ class AquiferSchema(ElementSchema): "aquitard_s": OffsetAllRequired(Positive()), "aquifer_s": AllRequired(Positive()), } - ttim_consistency_schemata = (SingleRow(),) + + +# ttim_consistency_schemata = (SingleRow(),) class Aquifer(TransientElement): @@ -95,4 +99,11 @@ def to_timml(self): def to_ttim(self): data = self.table_to_dict(self.timml_layer) errors = self.schema.validate_ttim(data) - return errors, data + time_data = self.table_to_records(self.ttim_layer)[0] + return errors, {**data, **time_data} + + def extract_data(self, transient: bool) -> Dict[str, Any]: + if transient: + return self.to_ttim() + else: + return self.to_timml() diff --git 
a/plugin/qgistim/core/elements/circular_area_sink.py b/plugin/qgistim/core/elements/circular_area_sink.py index 49f7087..89d39e7 100644 --- a/plugin/qgistim/core/elements/circular_area_sink.py +++ b/plugin/qgistim/core/elements/circular_area_sink.py @@ -69,15 +69,30 @@ def renderer(cls): color=TRANSPARENT_GREEN, color_border=GREEN, width_border="0.75" ) - def process_timml_row(self, row, other=None) -> Dict[str, Any]: + def _centroid_and_radius(self, row): # Take the first vertex. x, y = self.point_xy(row) # Compare with the centroid to derive radius. xc, yc = row["centroid"] radius = ((x - xc) ** 2 + (y - yc) ** 2) ** 0.5 + return xc, yc, radius + + def process_timml_row(self, row, other=None) -> Dict[str, Any]: + xc, yc, radius = self._centroid_and_radius(row) return { "xc": xc, "yc": yc, "R": radius, "N": row["rate"], + "label": row["label"], + } + + def process_ttim_row(self, row, grouped): + xc, yc, radius = self._centroid_and_radius(row) + return { + "xc": xc, + "yc": yc, + "R": radius, + "tsandN": self.transient_input(row, grouped, "rate"), + "label": row["label"], } diff --git a/plugin/qgistim/core/elements/domain.py b/plugin/qgistim/core/elements/domain.py index f0a1994..b86f7db 100644 --- a/plugin/qgistim/core/elements/domain.py +++ b/plugin/qgistim/core/elements/domain.py @@ -66,7 +66,7 @@ def update_extent(self, iface: Any) -> Tuple[float, float]: return ymax, ymin def to_timml(self, other): - data = self.to_dict(self.timml_layer) + data = self.table_to_records(self.timml_layer) errors = self.schema.validate_timml(data, other) if errors: return errors, None @@ -80,8 +80,9 @@ def to_timml(self, other): "ymax": max(y), } - def to_ttim(self): - data = self.to_timml() + def to_ttim(self, other): + _, data = self.to_timml(other) ttim_data = self.table_to_dict(self.ttim_layer) + ttim_errors = self.schema.validate_ttim(ttim_data, other) data["time"] = ttim_data["time"] - return data + return ttim_errors, data diff --git 
a/plugin/qgistim/core/elements/element.py b/plugin/qgistim/core/elements/element.py index c8a8feb..416dc35 100644 --- a/plugin/qgistim/core/elements/element.py +++ b/plugin/qgistim/core/elements/element.py @@ -320,7 +320,6 @@ def on_transient_changed(self, _): @staticmethod def groupby(data: Dict[str, Any], key: str): - # TODO: sort by layer or time? data = deepcopy(data) grouper = data.pop(key) grouped = {k: defaultdict(list) for k in grouper} @@ -330,7 +329,7 @@ def groupby(data: Dict[str, Any], key: str): return grouped def to_timml(self, other=None): - data = self.to_dict(self.timml_layer) + data = self.table_to_records(self.timml_layer) errors = self.schema.validate_timml(data, other) if errors: return errors, data @@ -341,8 +340,16 @@ def to_timml(self, other=None): def to_ttim(self, other=None): return self.to_timml(other) + def extract_data(self, transient: bool, other=None) -> Union[List, Dict]: + if transient: + return self.to_ttim(other) + else: + return self.to_timml(other) + @staticmethod def aquifer_data(data, transient: bool): + """Used by the Aquifer element and the Inhomogeneities.""" + def interleave(a, b): return [value for pair in zip(a, b) for value in pair] @@ -377,15 +384,18 @@ def interleave(a, b): "kaq": kaq, "z": z, "c": c, - "npor": porosity, + "topboundary": topboundary, } if transient: aquifer["Saq"] = s_aquifer aquifer["Sll"] = s_aquitard aquifer["phreatictop"] = True + aquifer["tmin"] = data["time_min"] + aquifer["tstart"] = 0.0 + aquifer["M"] = data["laplace_inversion_M"] else: - aquifer["topboundary"] = topboundary + aquifer["npor"] = porosity aquifer["hstar"] = hstar if "resistance" in data: @@ -431,8 +441,22 @@ def remove_from_geopackage(self): geopackage.remove_layer(self.path, self.timml_name) geopackage.remove_layer(self.path, self.ttim_name) + def on_transient_changed(self, transient: bool): + if len(self.transient_columns) == 0: + return + + config = self.timml_layer.attributeTableConfig() + columns = config.columns() + + for 
i, column in enumerate(columns): + if column.name in self.transient_columns: + config.setColumnHidden(i, not transient) + + self.timml_layer.setAttributeTableConfig(config) + return + @staticmethod - def transient_input(row, all_timeseries, variable: str, time_start): + def transient_input(row, all_timeseries: Dict[str, Any], variable: str): timeseries_id = row["timeseries_id"] row_start = row["time_start"] row_end = row["time_end"] @@ -454,38 +478,29 @@ def transient_input(row, all_timeseries, variable: str, time_start): elif start_and_stop: return [(row_start, transient_value - steady_value), (row_end, 0.0)] else: - return [(time_start), 0.0] + return [(0.0, 0.0)] - def on_transient_changed(self, transient: bool): - if len(self.transient_columns) == 0: - return - - config = self.timml_layer.attributeTableConfig() - columns = config.columns() - for i, column in enumerate(columns): - if column.name in self.transient_columns: - config.setColumnHidden(i, not transient) + def to_ttim(self, other): + timeseries = self.table_to_dict(self.ttim_layer) + if timeseries: + errors = self.schema.validate_timeseries(timeseries) + if errors: + return errors, None - self.timml_layer.setAttributeTableConfig(config) - return + grouped = self.groupby(timeseries, "timeseries_id") + # Store timeseries in other dict for validation. + other = other.copy() + other["timeseries_ids"] = set(timeseries["timeseries_id"]) - def to_ttim(self, other=None): - # Check timml input and timeseries input first. 
- timml_errors, data = self.to_timml(other) - timeseries = self.table_to_dict(self.ttim_layer) - timeseries_errors = self.schema.validate_timeseries(timeseries) - errors = {**timml_errors, **timeseries_errors} - if errors: - return errors, None + else: + grouped = {} - other = other.copy() - other["timeseries_ids"] = set([row["timeseries_id"] for row in timeseries]) - errors = self.schema.validate_ttim(data, other) - if errors: - return errors, None + data = self.table_to_records(self.timml_layer) + # errors = self.schema.validate_ttim(data, other) + # if errors: + # return errors, None - grouped = self.groupby(timeseries, "timeseries_id") elements = [self.process_ttim_row(row, grouped) for row in data] return None, elements @@ -536,7 +551,7 @@ def to_timml(self, other): other = other.copy() # Avoid side-effects other["properties inhomogeneity_id"] = list(set(properties["inhomogeneity_id"])) - data = self.to_dict(self.timml_layer) + data = self.table_to_records(self.timml_layer) errors = self.schema.validate_timml(data, other) if errors: return errors, None diff --git a/plugin/qgistim/core/elements/head_line_sink.py b/plugin/qgistim/core/elements/head_line_sink.py index bcf95db..d0b29ff 100644 --- a/plugin/qgistim/core/elements/head_line_sink.py +++ b/plugin/qgistim/core/elements/head_line_sink.py @@ -82,21 +82,12 @@ def process_timml_row(self, row, other=None): "label": row["label"], } - def to_ttim(self, time_start): - data = self.to_dict(self.timml_layer) - ttim_data = self.table_to_dict(self.ttim_layer) - grouped = self.groupby(ttim_data, "timeseries_id") - sinks = [] - for row in data: - sinks.append( - { - "xy": self.linestring_xy(row), - "tsandh": self._transient_input(row, grouped, "head", time_start), - "res": row["resistance"], - "wh": row["width"], - "order": row["order"], - "layers": row["layer"], - "label": row["label"], - } - ) - return sinks + def process_ttim_row(self, row, grouped): + return { + "xy": self.linestring_xy(row), + "tsandh": 
self.transient_input(row, grouped, "head"), + "res": row["resistance"], + "wh": row["width"], + "layers": row["layer"], + "label": row["label"], + } diff --git a/plugin/qgistim/core/elements/headwell.py b/plugin/qgistim/core/elements/headwell.py index 9713744..8d48a3a 100644 --- a/plugin/qgistim/core/elements/headwell.py +++ b/plugin/qgistim/core/elements/headwell.py @@ -86,6 +86,22 @@ def process_timml_row(self, row, other=None) -> Dict[str, Any]: "label": row["label"], } + def process_ttim_row(self, row, grouped): + x, y = self.point_xy(row) + return { + "xw": x, + "yw": y, + "tsandh": self.transient_input( + row, + grouped, + "head", + ), + "rw": row["radius"], + "res": row["resistance"], + "layers": row["layer"], + "label": row["label"], + } + class RemoteHeadWell(HeadWell): element_type = "Remote Head Well" diff --git a/plugin/qgistim/core/elements/impermeable_line_doublet.py b/plugin/qgistim/core/elements/impermeable_line_doublet.py index 29266b0..c4383bf 100644 --- a/plugin/qgistim/core/elements/impermeable_line_doublet.py +++ b/plugin/qgistim/core/elements/impermeable_line_doublet.py @@ -40,7 +40,7 @@ def process_timml_row(self, row, other=None): def to_ttim(self, other): # TTim doesn't have an ImpermeableLineDoublet, we need to add "imp" as # the resistance entry. 
- data = self.to_timml(other) + _, data = self.to_timml(other) out = [] for row in data: out.append( @@ -51,4 +51,4 @@ def to_ttim(self, other): "label": row["label"], } ) - return out + return None, out diff --git a/plugin/qgistim/core/elements/line_sink_ditch.py b/plugin/qgistim/core/elements/line_sink_ditch.py index 8d679d7..a1b3a2b 100644 --- a/plugin/qgistim/core/elements/line_sink_ditch.py +++ b/plugin/qgistim/core/elements/line_sink_ditch.py @@ -81,3 +81,13 @@ def process_timml_row(self, row, other=None): "layers": row["layer"], "label": row["label"], } + + def process_ttim_row(self, row, grouped): + return { + "xy": self.linestring_xy(row), + "tsandQ": self.transient_input(row, grouped, "discharge"), + "res": row["resistance"], + "wh": row["width"], + "layers": row["layer"], + "label": row["label"], + } diff --git a/plugin/qgistim/core/elements/observation.py b/plugin/qgistim/core/elements/observation.py index 8b187d3..48b3603 100644 --- a/plugin/qgistim/core/elements/observation.py +++ b/plugin/qgistim/core/elements/observation.py @@ -46,3 +46,12 @@ def process_timml_row(self, row, other=None): "y": y, "label": row["label"], } + + def process_ttim_row(self, row, grouped): + x, y = self.point_xy(row) + return { + "x": x, + "y": y, + "t": grouped[row["timeseries_id"]]["time"], + "label": row["label"], + } diff --git a/plugin/qgistim/core/elements/well.py b/plugin/qgistim/core/elements/well.py index c390008..3dc00d3 100644 --- a/plugin/qgistim/core/elements/well.py +++ b/plugin/qgistim/core/elements/well.py @@ -94,27 +94,20 @@ def process_timml_row(self, row, other=None) -> Dict[str, Any]: "label": row["label"], } - def to_ttim(self, time_start): - data = self.to_dict(self.timml_layer) - ttim_data = self.table_to_dict(self.ttim_layer) - grouped = self.groupby(ttim_data, "timeseries_id") - wells = [] - for row in data: - x, y = self.point_xy(row) - wells.append( - { - "xw": x, - "yw": y, - "tsandQ": self._transient_input( - row, grouped, "discharge", 
time_start - ), - "hw": row["head"], - "rw": row["radius"], - "res": row["resistance"], - "layers": row["layer"], - "label": row["label"], - "rc": row["caisson_radius"], - "wbstype": "slug" if row["slug"] else "pumping", - } - ) - return wells + def process_ttim_row(self, row, grouped): + x, y = self.point_xy(row) + return { + "xw": x, + "yw": y, + "tsandQ": self.transient_input( + row, + grouped, + "discharge", + ), + "rw": row["radius"], + "res": row["resistance"], + "layers": row["layer"], + "label": row["label"], + "rc": row["caisson_radius"], + "wbstype": "slug" if row["slug"] else "pumping", + } diff --git a/plugin/qgistim/core/extractor.py b/plugin/qgistim/core/extractor.py index 396497b..c89a850 100644 --- a/plugin/qgistim/core/extractor.py +++ b/plugin/qgistim/core/extractor.py @@ -1,6 +1,6 @@ import abc from collections import defaultdict -from typing import Any, Dict, List +from typing import Any, Dict, List, Tuple from qgis.core import NULL @@ -11,6 +11,10 @@ def remove_zero_length(geometry): class ExtractorMixin(abc.ABC): + """ + Mixin class to extract all data from QgsVectorLayers. + """ + @staticmethod def argsort(seq): return sorted(range(len(seq)), key=seq.__getitem__) @@ -25,7 +29,7 @@ def extract_coordinates(feature): return (centroid.x(), centroid.y()), coordinates @classmethod - def to_dict(cls, layer) -> List[Dict[str, Any]]: + def table_to_records(cls, layer) -> List[Dict[str, Any]]: features = [] for feature in layer.getFeatures(): data = feature.attributeMap() @@ -47,23 +51,17 @@ def table_to_dict(cls, layer) -> Dict[str, Any]: features[key].append(None) else: features[key].append(value) - - # Sort by layer if present. - # TODO: is this smart? A user may easily miss the layer column. 
- # if "layer" in features: - # order = cls.argsort(features["layer"]) - # return {k: [v[i] for i in order] for k, v in features.items()} return features @staticmethod - def point_xy(row): + def point_xy(row) -> Tuple[List[float], List[float]]: point = row["geometry"][0] return point[0], point[1] @staticmethod - def linestring_xy(row): + def linestring_xy(row) -> List: return remove_zero_length(row["geometry"]) @staticmethod - def polygon_xy(row): + def polygon_xy(row) -> List: return remove_zero_length(row["geometry"]) diff --git a/plugin/qgistim/core/formatting.py b/plugin/qgistim/core/formatting.py index 0bb83aa..a9ebc44 100644 --- a/plugin/qgistim/core/formatting.py +++ b/plugin/qgistim/core/formatting.py @@ -1,15 +1,16 @@ """ -Format the content of a collection of dictionaries into a Python script. +Format the content of a collection of dictionaries into a Python script or a +dictionary to be serialized to JSON. """ import pprint import re import textwrap -from typing import Any, Dict, Tuple +from typing import Any, Dict, Tuple, Union import numpy as np -MAPPING = { +TIMML_MAPPING = { "Constant": "Constant", "Uniform Flow": "Uflow", "Circular Area Sink": "CircAreaSink", @@ -28,6 +29,20 @@ "Head Observation": "Head Observation", "Discharge Observation": "Discharge Observation", } +# In TTim, a constant or uniform flow may be added, but they have no effect on +# the transient superposed result. +TTIM_MAPPING = { + "Constant": None, + "Uniform Flow": None, + "Circular Area Sink": "CircAreaSink", + "Well": "Well", + "Head Well": "HeadWell", + "Head Line Sink": "HeadLineSinkString", + "Line Sink Ditch": "LineSinkDitchString", + "Leaky Line Doublet": "LeakyLineDoubletString", + "Impermeable Line Doublet": "LeakyLineDoubletString", + "Head Observation": "Head Observation", +} PREFIX = " " @@ -41,6 +56,25 @@ def format_kwargs(data: Dict[str, Any]) -> str: ) +def round_spacing(ymin: float, ymax: float) -> float: + """ + Some reasonable defaults for grid spacing. 
+ + We attempt to get around 50 rows in the computed grid, with grid sizes a + multiple of 1.0, 5.0, 50.0, or 500.0. + """ + dy = (ymax - ymin) / 50.0 + if dy > 500.0: + dy = round(dy / 500.0) * 500.0 + elif dy > 50.0: + dy = round(dy / 50.0) * 50.0 + elif dy > 5.0: # round to five + dy = round(dy / 5.0) * 5.0 + elif dy > 1.0: + dy = round(dy) + return dy + + def round_extent(domain: Dict[str, float], cellsize: float) -> Tuple[float]: """ Increases the extent until all sides lie on a coordinate @@ -81,83 +115,152 @@ def headgrid_entry(domain: Dict[str, float], cellsize: float) -> Dict[str, float "ymin": ymin, "ymax": ymax, "spacing": cellsize, + "time": domain.get("time"), } def headgrid_code(domain) -> Tuple[str, str]: ymin = domain["ymin"] ymax = domain["ymax"] - dy = (ymax - ymin) / 50.0 - # Some reasonable defaults for grid spacing: - if dy > 500.0: - dy = round(dy / 500.0) * 500.0 - elif dy > 50.0: - dy = round(dy / 50.0) * 50.0 - elif dy > 5.0: # round to five - dy = round(dy / 5.0) * 5.0 - elif dy > 1.0: - dy = round(dy) + dy = round_spacing(ymin, ymax) (xmin, xmax, ymin, ymax) = round_extent(domain, dy) xg = textwrap.indent(f"xg=np.arange({xmin}, {xmax}, {dy})", prefix=PREFIX) yg = textwrap.indent(f"yg=np.arange({ymax}, {ymin}, -{dy})", prefix=PREFIX) - return xg, yg + time = domain.get("time") + t = textwrap.indent(f"t={time}", prefix=PREFIX) + return xg, yg, t -def to_script_string(data: Dict[str, Any]) -> str: - """ - Convert the data into a runnable Python script. - - Returns - ------- - data: Dict[str, Any] - All the data for the TimML model. - script: str - A runnable Python script. 
- """ - data = data.copy() # avoid side-effects - aquifer_data = data.pop("timml Aquifer:Aquifer") - domain_data = data.pop("timml Domain:Domain") - - strings = [ - "import numpy as np", - "import timml", - "", - f"model = timml.ModelMaq(\n{format_kwargs(aquifer_data)}\n)", - ] - - model_string = textwrap.indent("model=model,", prefix=PREFIX) +def elements_and_observations(data, mapping: Dict[str, str], tim: str): + strings = [] observations = [] + model_string = textwrap.indent(f"model={tim}_model,", prefix=PREFIX) for layername, element_data in data.items(): prefix, name = layername.split(":") plugin_name = re.split("timml |ttim ", prefix)[1] - timml_name = MAPPING[plugin_name] + tim_name = mapping[plugin_name] + if tim_name is None: + continue for i, kwargs in enumerate(element_data): if plugin_name == "Head Observation": # Should not be added to the model. + # Would result in e.g.: + # observation_piezometer_0 = timml.head( + # x=10.0, + # y=20.0, + # ) kwargs.pop("label") observations.append( - f"observation_{sanitized(name)}_{i}=model.head(\n{format_kwargs(kwargs)}\n)" + f"observation_{sanitized(name)}_{i}={tim}_model.head(\n{format_kwargs(kwargs)}\n)" ) else: # Has to be added to the model. + # Would result in e.g.: + # timml_extraction_0 = timml.Well( + # model=timml_model, + # ... 
+ # ) kwargs = format_kwargs(kwargs) strings.append( - f"{sanitized(name)}_{i} = timml.{timml_name}(\n{model_string}\n{kwargs}\n)" + f"{tim}_{sanitized(name)}_{i} = {tim}.{tim_name}(\n{model_string}\n{kwargs}\n)" ) - strings.append("\nmodel.solve()\n") + return strings, observations + + +def timml_script_content(data: Dict[str, Any]): + data = data.copy() # avoid side-effects + aquifer_data = data.pop("timml Aquifer:Aquifer") + data.pop("timml Domain:Domain") + + strings = [ + "import numpy as np", + "import timml", + "", + f"timml_model = timml.ModelMaq(\n{format_kwargs(aquifer_data)}\n)", + ] - xg, yg = headgrid_code(domain_data) - strings.append(f"head = model.headgrid(\n{xg},\n{yg}\n)") + element_strings, observations = elements_and_observations( + data, TIMML_MAPPING, tim="timml" + ) + strings = strings + element_strings + return strings, observations + + +def timml_script(data: Dict[str, Any]) -> str: + strings, observations = timml_script_content(data) + strings.append("\ntimml_model.solve()\n") + xg, yg, _ = headgrid_code(data["timml Domain:Domain"]) + strings.append(f"head = timml_model.headgrid(\n{xg},\n{yg}\n)") strings.append("\n") strings.extend(observations) + return "\n".join(strings) + + +def ttim_script(timml_data: Dict[str, Any], ttim_data: Dict[str, Any]) -> str: + strings, _ = timml_script_content(timml_data) + strings.insert(2, "import ttim") + data = ttim_data.copy() # avoid side-effects + aquifer_data = data.pop("timml Aquifer:Aquifer") + domain_data = data.pop("timml Domain:Domain") + data.pop("reference_date") + + strings.append( + f"\nttim_model = ttim.ModelMaq(\n{format_kwargs(aquifer_data)}\n{PREFIX}timmlmodel=timml_model,\n)" + ) + + element_strings, observations = elements_and_observations( + data, TTIM_MAPPING, tim="ttim" + ) + strings = strings + element_strings + strings.append("\ntimml_model.solve()\nttim_model.solve()\n") + + if domain_data.get("time"): + xg, yg, t = headgrid_code(domain_data) + strings.append(f"head = 
ttim_model.headgrid(\n{xg},\n{yg},\n{t}\n)") + strings.append("\n") + + strings.extend(observations) return "\n".join(strings) -def to_json( - data: Dict[str, Any], cellsize: float, output_options: Dict[str, bool] +def data_to_script( + timml_data: Dict[str, Any], + ttim_data: Union[Dict[str, Any], None], +) -> str: + if ttim_data is None: + return timml_script(timml_data) + else: + return ttim_script(timml_data, ttim_data) + + +def json_elements_and_observations(data, mapping: Dict[str, str]): + aquifer_data = data.pop("timml Aquifer:Aquifer") + + observations = {} + tim_data = {"ModelMaq": aquifer_data} + for layername, element_data in data.items(): + prefix, name = layername.split(":") + plugin_name = re.split("timml |ttim ", prefix)[1] + tim_name = mapping[plugin_name] + if tim_name is None: + continue + + entry = {"type": tim_name, "name": name, "data": element_data} + if tim_name == "Head Observation": + observations[layername] = entry + else: + tim_data[layername] = entry + + return tim_data, observations + + +def timml_json( + timml_data: Dict[str, Any], + cellsize: float, + output_options: Dict[str, bool], ) -> Dict[str, Any]: """ Take the data and add: @@ -174,20 +277,48 @@ def to_json( json_data: Dict[str, Any] Data ready to dump to JSON. 
""" - data = data.copy() # avoid side-effects - aquifer_data = data.pop("timml Aquifer:Aquifer") + # Process TimML elements + data = timml_data.copy() # avoid side-effects domain_data = data.pop("timml Domain:Domain") - + timml_json, observations = json_elements_and_observations( + data, mapping=TIMML_MAPPING + ) json_data = { - "ModelMaq": aquifer_data, - "headgrid": headgrid_entry(domain_data, cellsize), + "timml": timml_json, "output_options": output_options, + "headgrid": headgrid_entry(domain_data, cellsize), + "observations": observations, } + return json_data - for layername, element_data in data.items(): - prefix, name = layername.split(":") - plugin_name = re.split("timml |ttim ", prefix)[1] - timml_name = MAPPING[plugin_name] - json_data[layername] = {"type": timml_name, "name": name, "data": element_data} +def ttim_json( + timml_data: Dict[str, Any], + ttim_data: Dict[str, Any], + cellsize: float, + output_options: Dict[str, bool], +) -> Dict[str, Any]: + json_data = timml_json(timml_data, cellsize, output_options) + + data = ttim_data.copy() + domain_data = data.pop("timml Domain:Domain") + reference_date = data.pop("reference_date") + ttim_json, observations = json_elements_and_observations(data, mapping=TTIM_MAPPING) + + json_data["ttim"] = ttim_json + json_data["headgrid"] = headgrid_entry(domain_data, cellsize) + json_data["reference_date"] = reference_date + json_data["observations"] = observations return json_data + + +def data_to_json( + timml_data: Dict[str, Any], + ttim_data: Union[Dict[str, Any], None], + cellsize: float, + output_options: Dict[str, bool], +) -> Dict[str, Any]: + if ttim_data is None: + return timml_json(timml_data, cellsize, output_options) + else: + return ttim_json(timml_data, ttim_data, cellsize, output_options) diff --git a/plugin/qgistim/widgets/dataset_widget.py b/plugin/qgistim/widgets/dataset_widget.py index 616f913..1c8d130 100644 --- a/plugin/qgistim/widgets/dataset_widget.py +++ 
b/plugin/qgistim/widgets/dataset_widget.py @@ -13,7 +13,7 @@ import json from pathlib import Path from shutil import copy -from typing import Dict, List, Set +from typing import Any, Dict, List, Set, Tuple from PyQt5.QtCore import Qt from PyQt5.QtWidgets import ( @@ -34,7 +34,7 @@ ) from qgis.core import Qgis, QgsProject from qgistim.core.elements import Aquifer, Domain, load_elements_from_geopackage -from qgistim.core.formatting import to_json, to_script_string +from qgistim.core.formatting import data_to_json, data_to_script from qgistim.widgets.error_window import ValidationDialog SUPPORTED_TTIM_ELEMENTS = set( @@ -193,30 +193,38 @@ def remove_geopackage_layers(self) -> None: return - def convert_to_timml(self): + def extract_data(self, transient: bool) -> Tuple[Dict[str, Any], Dict[str, Any]]: + """ + Extract the data of the Geopackage. + + Validates all data while converting, and returns a list of validation + errors if something is amiss. + """ + data = {} + errors = {} elements = { item.text(1): item.element for item in self.items() if item.timml_checkbox.isChecked() } - data = {} - errors = {} # First convert the aquifer, since we need its data to validate # other elements. 
name = "timml Aquifer:Aquifer" aquifer = elements.pop(name) - _errors, raw_data = aquifer.to_timml() - other = {"aquifer layers": raw_data["layer"], "global_aquifer": raw_data} + _errors, raw_data = aquifer.extract_data(transient) if _errors: errors[name] = _errors else: - aquifer_data = aquifer.aquifer_data(raw_data, transient=False) + aquifer_data = aquifer.aquifer_data(raw_data, transient=transient) data[name] = aquifer_data + if transient: + data["reference_date"] = str(raw_data["reference_date"].toPyDateTime()) + other = {"aquifer layers": raw_data["layer"], "global_aquifer": raw_data} for name, element in elements.items(): try: - _errors, _data = element.to_timml(other) + _errors, _data = element.extract_data(transient, other) if _errors: errors[name] = _errors elif _data: # skip empty tables @@ -458,27 +466,36 @@ def set_interpreter_interaction(self, value: bool): self.parent.set_interpreter_interaction(value) return - def _convert(self, transient: bool) -> None: + def extract_data(self, transient: bool) -> Tuple[Dict[str, Any], Dict[str, Any]]: if self.validation_dialog: self.validation_dialog.close() self.validation_dialog = None - errors, data = self.dataset_tree.convert_to_timml() + errors, timml_data = self.dataset_tree.extract_data(transient=False) if errors: self.validation_dialog = ValidationDialog(errors) - return None + return None, None + + ttim_data = None + if transient: + errors, ttim_data = self.dataset_tree.extract_data(transient=True) + if errors: + self.validation_dialog = ValidationDialog(errors) + return None, None - return data + return timml_data, ttim_data - def convert_to_python(self, transient: bool) -> None: + def convert_to_python(self) -> None: + transient = self.transient outpath, _ = QFileDialog.getSaveFileName(self, "Select file", "", "*.py") if outpath == "": # Empty string in case of cancel button press return - data = self._convert(transient=transient) - if data is None: + + timml_data, ttim_data = 
self.extract_data(transient=transient) + if timml_data is None: return - script = to_script_string(data) + script = data_to_script(timml_data, ttim_data) with open(outpath, "w") as f: f.write(script) @@ -511,11 +528,17 @@ def convert_to_json( invalid_input: bool Whether validation has succeeded. """ - data = self._convert(transient=transient) - if data is None: + timml_data, ttim_data = self.extract_data(transient=transient) + if timml_data is None: return True - json_data = to_json(data, cellsize=cellsize, output_options=output_options) + json_data = data_to_json( + timml_data, + ttim_data, + cellsize=cellsize, + output_options=output_options, + ) + crs = self.parent.crs organization, srs_id = crs.authid().split(":") json_data["crs"] = { @@ -524,6 +547,7 @@ def convert_to_json( "srs_id": srs_id, "wkt": crs.toWkt(), } + with open(path, "w") as fp: json.dump(json_data, fp=fp, indent=4) From dfba89ce09032a304e6488fd44ab8b83fd2b939c Mon Sep 17 00:00:00 2001 From: Huite Bootsma Date: Thu, 26 Oct 2023 15:09:41 +0200 Subject: [PATCH 34/41] Infer tmax for TTim --- .../qgistim/core/elements/circular_area_sink.py | 4 +++- plugin/qgistim/core/elements/domain.py | 2 ++ plugin/qgistim/core/elements/element.py | 17 ++++++++++++----- plugin/qgistim/core/elements/head_line_sink.py | 4 +++- plugin/qgistim/core/elements/headwell.py | 8 +++----- plugin/qgistim/core/elements/line_sink_ditch.py | 4 +++- plugin/qgistim/core/elements/well.py | 8 +++----- plugin/qgistim/widgets/dataset_widget.py | 8 ++++++++ 8 files changed, 37 insertions(+), 18 deletions(-) diff --git a/plugin/qgistim/core/elements/circular_area_sink.py b/plugin/qgistim/core/elements/circular_area_sink.py index 89d39e7..86c12d6 100644 --- a/plugin/qgistim/core/elements/circular_area_sink.py +++ b/plugin/qgistim/core/elements/circular_area_sink.py @@ -89,10 +89,12 @@ def process_timml_row(self, row, other=None) -> Dict[str, Any]: def process_ttim_row(self, row, grouped): xc, yc, radius = self._centroid_and_radius(row) + 
tsandN, tmax = self.transient_input(row, grouped, "rate") + self.times.append(tmax) return { "xc": xc, "yc": yc, "R": radius, - "tsandN": self.transient_input(row, grouped, "rate"), + "tsandN": tsandN, "label": row["label"], } diff --git a/plugin/qgistim/core/elements/domain.py b/plugin/qgistim/core/elements/domain.py index b86f7db..3aab043 100644 --- a/plugin/qgistim/core/elements/domain.py +++ b/plugin/qgistim/core/elements/domain.py @@ -28,6 +28,7 @@ class Domain(TransientElement): schema = DomainSchema() def __init__(self, path: str, name: str): + self.times = [] self._initialize_default(path, name) self.timml_name = f"timml {self.element_type}:Domain" self.ttim_name = "ttim Computation Times:Domain" @@ -85,4 +86,5 @@ def to_ttim(self, other): ttim_data = self.table_to_dict(self.ttim_layer) ttim_errors = self.schema.validate_ttim(ttim_data, other) data["time"] = ttim_data["time"] + self.times = [max(data["time"])] return ttim_errors, data diff --git a/plugin/qgistim/core/elements/element.py b/plugin/qgistim/core/elements/element.py index 416dc35..97e6ba8 100644 --- a/plugin/qgistim/core/elements/element.py +++ b/plugin/qgistim/core/elements/element.py @@ -208,6 +208,7 @@ def _initialize_default(self, path, name): self.item = None def __init__(self, path: str, name: str): + self.times = [] # Store the times to get tmax for TTim. 
self._initialize_default(path, name) self.timml_name = f"timml {self.element_type}:{name}" @@ -338,6 +339,7 @@ def to_timml(self, other=None): return [], elements def to_ttim(self, other=None): + self.times = [] return self.to_timml(other) def extract_data(self, transient: bool, other=None) -> Union[List, Dict]: @@ -456,7 +458,9 @@ def on_transient_changed(self, transient: bool): return @staticmethod - def transient_input(row, all_timeseries: Dict[str, Any], variable: str): + def transient_input( + row, all_timeseries: Dict[str, Any], variable: str + ) -> Tuple[List[Any], float]: timeseries_id = row["timeseries_id"] row_start = row["time_start"] row_end = row["time_end"] @@ -474,14 +478,17 @@ def transient_input(row, all_timeseries: Dict[str, Any], variable: str): return [ (time, value - steady_value) for time, value in zip(timeseries["time_start"], timeseries[variable]) - ] + ], max(timeseries["time_start"]) elif start_and_stop: - return [(row_start, transient_value - steady_value), (row_end, 0.0)] + return [ + (row_start, transient_value - steady_value), + (row_end, 0.0), + ], row_end else: - return [(0.0, 0.0)] - + return [(0.0, 0.0)], 0.0 def to_ttim(self, other): + self.times = [] timeseries = self.table_to_dict(self.ttim_layer) if timeseries: errors = self.schema.validate_timeseries(timeseries) diff --git a/plugin/qgistim/core/elements/head_line_sink.py b/plugin/qgistim/core/elements/head_line_sink.py index d0b29ff..2e2fbbd 100644 --- a/plugin/qgistim/core/elements/head_line_sink.py +++ b/plugin/qgistim/core/elements/head_line_sink.py @@ -83,9 +83,11 @@ def process_timml_row(self, row, other=None): } def process_ttim_row(self, row, grouped): + tsandh, tmax = self.transient_input(row, grouped, "head") + self.times.append(tmax) return { "xy": self.linestring_xy(row), - "tsandh": self.transient_input(row, grouped, "head"), + "tsandh": tsandh, "res": row["resistance"], "wh": row["width"], "layers": row["layer"], diff --git 
a/plugin/qgistim/core/elements/headwell.py b/plugin/qgistim/core/elements/headwell.py index 8d48a3a..1f4cbe1 100644 --- a/plugin/qgistim/core/elements/headwell.py +++ b/plugin/qgistim/core/elements/headwell.py @@ -88,14 +88,12 @@ def process_timml_row(self, row, other=None) -> Dict[str, Any]: def process_ttim_row(self, row, grouped): x, y = self.point_xy(row) + tsandh, tmax = self.transient_input(row, grouped, "head") + self.times.append(tmax) return { "xw": x, "yw": y, - "tsandh": self.transient_input( - row, - grouped, - "head", - ), + "tsandh": tsandh, "rw": row["radius"], "res": row["resistance"], "layers": row["layer"], diff --git a/plugin/qgistim/core/elements/line_sink_ditch.py b/plugin/qgistim/core/elements/line_sink_ditch.py index a1b3a2b..a50a963 100644 --- a/plugin/qgistim/core/elements/line_sink_ditch.py +++ b/plugin/qgistim/core/elements/line_sink_ditch.py @@ -83,9 +83,11 @@ def process_timml_row(self, row, other=None): } def process_ttim_row(self, row, grouped): + tsandQ, tmax = self.transient_input(row, grouped, "discharge") + self.times.append(tmax) return { "xy": self.linestring_xy(row), - "tsandQ": self.transient_input(row, grouped, "discharge"), + "tsandQ": tsandQ, "res": row["resistance"], "wh": row["width"], "layers": row["layer"], diff --git a/plugin/qgistim/core/elements/well.py b/plugin/qgistim/core/elements/well.py index 3dc00d3..2029e87 100644 --- a/plugin/qgistim/core/elements/well.py +++ b/plugin/qgistim/core/elements/well.py @@ -96,14 +96,12 @@ def process_timml_row(self, row, other=None) -> Dict[str, Any]: def process_ttim_row(self, row, grouped): x, y = self.point_xy(row) + tsandQ, tmax = self.transient_input(row, grouped, "discharge") + self.times.append(tmax) return { "xw": x, "yw": y, - "tsandQ": self.transient_input( - row, - grouped, - "discharge", - ), + "tsandQ": tsandQ, "rw": row["radius"], "res": row["resistance"], "layers": row["layer"], diff --git a/plugin/qgistim/widgets/dataset_widget.py 
b/plugin/qgistim/widgets/dataset_widget.py index 1c8d130..3caf5ef 100644 --- a/plugin/qgistim/widgets/dataset_widget.py +++ b/plugin/qgistim/widgets/dataset_widget.py @@ -239,6 +239,14 @@ def extract_data(self, transient: bool) -> Tuple[Dict[str, Any], Dict[str, Any]] else: raise e + # Collect all the times. They have been set in the element while + # converting. + if transient: + times = [] + for element in elements.values(): + times.extend(element.times) + data["timml Aquifer:Aquifer"]["tmax"] = max(times) + return errors, data From 323e07ed1b1a551a45da01a86414f2ce4f4228a5 Mon Sep 17 00:00:00 2001 From: Huite Bootsma Date: Fri, 27 Oct 2023 12:28:20 +0200 Subject: [PATCH 35/41] Allow steady-state head in TTim headgrid. Remove tqdm --- gistim/compute.py | 37 +++++++++++++++++++++---------------- 1 file changed, 21 insertions(+), 16 deletions(-) diff --git a/gistim/compute.py b/gistim/compute.py index 3220bbc..9f12275 100644 --- a/gistim/compute.py +++ b/gistim/compute.py @@ -7,7 +7,6 @@ import numpy as np import pandas as pd import timml -import tqdm import ttim import xarray as xr @@ -95,13 +94,9 @@ def _( x = np.arange(xmin, xmax, spacing) + 0.5 * spacing # In geospatial rasters, y is DECREASING with row number y = np.arange(ymax, ymin, -spacing) - 0.5 * spacing + head = model.headgrid(xg=x, yg=y) nlayer = model.aq.find_aquifer_data(x[0], y[0]).naq layer = [i for i in range(nlayer)] - head = np.empty((nlayer, y.size, x.size), dtype=np.float64) - for i in tqdm.tqdm(range(y.size)): - for j in range(x.size): - head[:, i, j] = model.head(x[j], y[i], layer) - return xr.DataArray( data=head, name="head", @@ -120,17 +115,25 @@ def _( spacing: float, reference_date: str, time: List[float], -) -> xr.DataArray: +) -> Union[None, xr.DataArray]: + if time is None: + return None + + # Get coordinates ready x = np.arange(xmin, xmax, spacing) + 0.5 * spacing # In geospatial rasters, y is DECREASING with row number y = np.arange(ymax, ymin, -spacing) - 0.5 * spacing nlayer = 
model.aq.find_aquifer_data(x[0], y[0]).naq - layer = [i for i in range(nlayer)] - head = np.empty((nlayer, len(time), y.size, x.size), dtype=np.float64) - for i in tqdm.tqdm(range(y.size)): - for j in range(x.size): - head[:, :, i, j] = model.head(x[j], y[i], time, layer) + if 0.0 in time: + steady_head = model.timmlmodel.headgrid(xg=x, yg=y)[:, np.newaxis, :, :] + transient_head = model.headgrid(xg=x, yg=y, t=time[1:]) + head = np.hstack((steady_head, transient_head)) + else: + head = model.headgrid(xg=x, yg=y, t=time) + + # Other coordinates + layer = [i for i in range(nlayer)] time = pd.to_datetime(reference_date) + pd.to_timedelta(time, "D") return xr.DataArray( data=head, @@ -250,13 +253,15 @@ def write_output( reference_date = pd.to_datetime(data.get("reference_date")) # Compute gridded head data and write to netCDF. + head = None if output_options["raster"] or output_options["mesh"]: head = headgrid(model, **data["headgrid"], reference_date=reference_date) - if output_options["raster"]: - write_raster(head, crs, path) - if output_options["mesh"]: - write_ugrid(head, crs, path) + if head is not None: + if output_options["raster"]: + write_raster(head, crs, path) + if output_options["mesh"]: + write_ugrid(head, crs, path) # Compute observations and discharge, and write to geopackage. 
if output_options["discharge"]: From e1349ba6c4dcf0c6060f3349aed67c13cf26627d Mon Sep 17 00:00:00 2001 From: Huite Bootsma Date: Fri, 27 Oct 2023 12:28:36 +0200 Subject: [PATCH 36/41] type hint --- plugin/qgistim/core/extractor.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/plugin/qgistim/core/extractor.py b/plugin/qgistim/core/extractor.py index c89a850..482c12b 100644 --- a/plugin/qgistim/core/extractor.py +++ b/plugin/qgistim/core/extractor.py @@ -2,7 +2,7 @@ from collections import defaultdict from typing import Any, Dict, List, Tuple -from qgis.core import NULL +from qgis.core import NULL, QgsVectorLayer def remove_zero_length(geometry): @@ -29,7 +29,7 @@ def extract_coordinates(feature): return (centroid.x(), centroid.y()), coordinates @classmethod - def table_to_records(cls, layer) -> List[Dict[str, Any]]: + def table_to_records(cls, layer: QgsVectorLayer) -> List[Dict[str, Any]]: features = [] for feature in layer.getFeatures(): data = feature.attributeMap() @@ -43,7 +43,7 @@ def table_to_records(cls, layer) -> List[Dict[str, Any]]: features.append(data) return features - def table_to_dict(cls, layer) -> Dict[str, Any]: + def table_to_dict(cls, layer: QgsVectorLayer) -> Dict[str, Any]: features = defaultdict(list) for feature in layer.getFeatures(): for key, value in feature.attributeMap().items(): From bc207607677a7e2e7220393b8c1b381f5d30fec7 Mon Sep 17 00:00:00 2001 From: Huite Bootsma Date: Fri, 27 Oct 2023 12:32:59 +0200 Subject: [PATCH 37/41] Refactor element Schemata * Also check for presence of columns * Consistently check geometry presence * Collect ttim times for setting tmax * Check timeseries_id presence in ttim layer * Do not contour when no variation in grid * Define some NamedTuples to clean up returning data versus errors --- plugin/qgistim/core/elements/aquifer.py | 52 +++-- plugin/qgistim/core/elements/building_pit.py | 8 +- .../core/elements/circular_area_sink.py | 12 +- plugin/qgistim/core/elements/constant.py 
| 8 +- plugin/qgistim/core/elements/domain.py | 58 +++-- plugin/qgistim/core/elements/element.py | 207 ++++++++---------- .../qgistim/core/elements/head_line_sink.py | 12 +- plugin/qgistim/core/elements/headwell.py | 17 +- .../core/elements/impermeable_line_doublet.py | 6 +- .../core/elements/leaky_building_pit.py | 8 +- .../core/elements/leaky_line_doublet.py | 6 +- .../qgistim/core/elements/line_sink_ditch.py | 12 +- plugin/qgistim/core/elements/observation.py | 10 +- .../core/elements/polygon_area_sink.py | 6 +- .../core/elements/polygon_inhomogeneity.py | 9 +- .../elements/polygon_semi_confined_top.py | 6 +- plugin/qgistim/core/elements/schemata.py | 134 ++++++++++++ plugin/qgistim/core/elements/uniform_flow.py | 5 +- plugin/qgistim/core/elements/well.py | 14 +- plugin/qgistim/core/schemata.py | 9 +- plugin/qgistim/widgets/compute_widget.py | 9 +- plugin/qgistim/widgets/dataset_widget.py | 72 +++--- 22 files changed, 436 insertions(+), 244 deletions(-) create mode 100644 plugin/qgistim/core/elements/schemata.py diff --git a/plugin/qgistim/core/elements/aquifer.py b/plugin/qgistim/core/elements/aquifer.py index e2501f7..8571c13 100644 --- a/plugin/qgistim/core/elements/aquifer.py +++ b/plugin/qgistim/core/elements/aquifer.py @@ -1,9 +1,8 @@ -from typing import Any, Dict - from PyQt5.QtCore import QVariant from qgis.core import QgsDefaultValue, QgsField from qgistim.core import geopackage -from qgistim.core.elements.element import ElementSchema, TransientElement +from qgistim.core.elements.element import ExtractionResult, TransientElement +from qgistim.core.elements.schemata import SingleRowSchema, TableSchema from qgistim.core.schemata import ( AllGreaterEqual, AllRequired, @@ -11,13 +10,13 @@ OptionalFirstOnly, Positive, Range, + Required, SemiConfined, - SingleRow, StrictlyDecreasing, ) -class AquiferSchema(ElementSchema): +class AquiferSchema(TableSchema): timml_schemata = { "layer": AllRequired(Range()), "aquifer_top": AllRequired(StrictlyDecreasing()), @@ 
-37,7 +36,12 @@ class AquiferSchema(ElementSchema): } -# ttim_consistency_schemata = (SingleRow(),) +class TemporalSettingsSchema(SingleRowSchema): + ttim_schemata = { + "time_min": Required(Positive()), + "laplace_inversion_M": Required(Positive()), + "reference_date": Required(), + } class Aquifer(TransientElement): @@ -72,6 +76,7 @@ class Aquifer(TransientElement): "aquifer_npor", ) schema = AquiferSchema() + assoc_schema = TemporalSettingsSchema() def __init__(self, path: str, name: str): self._initialize_default(path, name) @@ -91,18 +96,33 @@ def remove_from_geopackage(self): """This element may not be removed.""" return - def to_timml(self): - data = self.table_to_dict(self.timml_layer) - errors = self.schema.validate_timml(data) - return errors, data + def to_timml(self) -> ExtractionResult: + missing = self.check_timml_columns() + if missing: + return ExtractionResult(errors=missing) + + data = self.table_to_dict(layer=self.timml_layer) + errors = self.schema.validate_timml(name=self.timml_layer.name(), data=data) + return ExtractionResult(errors=errors, data=data) + + def to_ttim(self) -> ExtractionResult: + missing = self.check_ttim_columns() + if missing: + return ExtractionResult(errors=missing) - def to_ttim(self): - data = self.table_to_dict(self.timml_layer) - errors = self.schema.validate_ttim(data) - time_data = self.table_to_records(self.ttim_layer)[0] - return errors, {**data, **time_data} + data = self.table_to_dict(layer=self.timml_layer) + time_data = self.table_to_records(layer=self.ttim_layer) + errors = { + **self.schema.validate_ttim(name=self.timml_layer.name(), data=data), + **self.assoc_schema.validate_ttim( + name=self.ttim_layer.name(), data=time_data + ), + } + if errors: + return ExtractionResult(errors=errors) + return ExtractionResult(data={**data, **time_data[0]}) - def extract_data(self, transient: bool) -> Dict[str, Any]: + def extract_data(self, transient: bool) -> ExtractionResult: if transient: return self.to_ttim() else: 
diff --git a/plugin/qgistim/core/elements/building_pit.py b/plugin/qgistim/core/elements/building_pit.py index 897ce6e..4b399e4 100644 --- a/plugin/qgistim/core/elements/building_pit.py +++ b/plugin/qgistim/core/elements/building_pit.py @@ -3,7 +3,8 @@ from PyQt5.QtCore import QVariant from qgis.core import QgsDefaultValue, QgsField, QgsSingleSymbolRenderer from qgistim.core.elements.colors import RED, TRANSPARENT_RED -from qgistim.core.elements.element import AssociatedElement, ElementSchema +from qgistim.core.elements.element import AssociatedElement +from qgistim.core.elements.schemata import RowWiseSchema, TableSchema from qgistim.core.schemata import ( AllGreaterEqual, AllRequired, @@ -19,15 +20,16 @@ ) -class BuildingPitSchema(ElementSchema): +class BuildingPitSchema(RowWiseSchema): timml_schemata = { + "geometry": Required(), "inhomogeneity_id": Required(Membership("properties inhomogeneity_id")), "order": Required(Positive()), "ndegrees": Required(Positive()), } -class AssociatedBuildingPitSchema(ElementSchema): +class AssociatedBuildingPitSchema(TableSchema): timml_schemata = { "inhomogeneity_id": AllRequired(), "layer": AllRequired(Range()), diff --git a/plugin/qgistim/core/elements/circular_area_sink.py b/plugin/qgistim/core/elements/circular_area_sink.py index 86c12d6..205cb2c 100644 --- a/plugin/qgistim/core/elements/circular_area_sink.py +++ b/plugin/qgistim/core/elements/circular_area_sink.py @@ -3,7 +3,8 @@ from PyQt5.QtCore import QVariant from qgis.core import QgsField from qgistim.core.elements.colors import GREEN, TRANSPARENT_GREEN -from qgistim.core.elements.element import ElementSchema, TransientElement +from qgistim.core.elements.element import TransientElement +from qgistim.core.elements.schemata import RowWiseSchema from qgistim.core.schemata import ( AllOrNone, AllRequired, @@ -16,7 +17,7 @@ ) -class CircularAreaSinkSchema(ElementSchema): +class CircularAreaSinkSchema(RowWiseSchema): timml_schemata = { "geometry": 
Required(CircularGeometry()), "rate": Required(), @@ -25,7 +26,7 @@ class CircularAreaSinkSchema(ElementSchema): ttim_schemata = { "time_start": Optional(Positive()), "time_end": Optional(Positive()), - "timeseries_id": Optional(Membership("timeseries_ids")), + "timeseries_id": Optional(Membership("ttim timeseries IDs")), } ttim_consistency_schemata = ( AllOrNone("time_start", "time_end", "rate_transient"), @@ -89,12 +90,11 @@ def process_timml_row(self, row, other=None) -> Dict[str, Any]: def process_ttim_row(self, row, grouped): xc, yc, radius = self._centroid_and_radius(row) - tsandN, tmax = self.transient_input(row, grouped, "rate") - self.times.append(tmax) + tsandN, times = self.transient_input(row, grouped, "rate") return { "xc": xc, "yc": yc, "R": radius, "tsandN": tsandN, "label": row["label"], - } + }, times diff --git a/plugin/qgistim/core/elements/constant.py b/plugin/qgistim/core/elements/constant.py index f6361ce..ddd3465 100644 --- a/plugin/qgistim/core/elements/constant.py +++ b/plugin/qgistim/core/elements/constant.py @@ -1,12 +1,12 @@ from PyQt5.QtCore import QVariant from qgis.core import QgsField, QgsSingleSymbolRenderer from qgistim.core.elements.colors import RED -from qgistim.core.elements.element import Element, ElementSchema -from qgistim.core.schemata import Membership, Required, SingleRow +from qgistim.core.elements.element import Element +from qgistim.core.elements.schemata import SingleRowSchema +from qgistim.core.schemata import Membership, Required -class ConstantSchema(ElementSchema): - timml_consistency_schemata = (SingleRow(),) +class ConstantSchema(SingleRowSchema): timml_schemata = { "geometry": Required(), "head": Required(), diff --git a/plugin/qgistim/core/elements/domain.py b/plugin/qgistim/core/elements/domain.py index 3aab043..6c40cc7 100644 --- a/plugin/qgistim/core/elements/domain.py +++ b/plugin/qgistim/core/elements/domain.py @@ -9,15 +9,15 @@ QgsSingleSymbolRenderer, ) from qgistim.core.elements.colors import BLACK 
-from qgistim.core.elements.element import ElementSchema, TransientElement -from qgistim.core.schemata import AllRequired, Positive, Required, SingleRow +from qgistim.core.elements.element import ExtractionResult, TransientElement +from qgistim.core.elements.schemata import SingleRowSchema +from qgistim.core.schemata import AllRequired, Positive, Required, StrictlyIncreasing -class DomainSchema(ElementSchema): +class DomainSchema(SingleRowSchema): timml_schemata = {"geometry": Required()} - timml_consistency_schemata = (SingleRow(),) timeseries_schemata = { - "time": AllRequired(Positive()), + "time": AllRequired(Positive(), StrictlyIncreasing()), } @@ -28,7 +28,6 @@ class Domain(TransientElement): schema = DomainSchema() def __init__(self, path: str, name: str): - self.times = [] self._initialize_default(path, name) self.timml_name = f"timml {self.element_type}:Domain" self.ttim_name = "ttim Computation Times:Domain" @@ -66,25 +65,38 @@ def update_extent(self, iface: Any) -> Tuple[float, float]: canvas.refresh() return ymax, ymin - def to_timml(self, other): - data = self.table_to_records(self.timml_layer) - errors = self.schema.validate_timml(data, other) + def to_timml(self, other) -> ExtractionResult: + data = self.table_to_records(layer=self.timml_layer) + errors = self.schema.validate_timml( + name=self.timml_layer.name(), data=data, other=other + ) if errors: - return errors, None + return ExtractionResult(errors=errors) else: x = [point[0] for point in data[0]["geometry"]] y = [point[1] for point in data[0]["geometry"]] - return None, { - "xmin": min(x), - "xmax": max(x), - "ymin": min(y), - "ymax": max(y), - } + return ExtractionResult( + data={ + "xmin": min(x), + "xmax": max(x), + "ymin": min(y), + "ymax": max(y), + } + ) + + def to_ttim(self, other) -> ExtractionResult: + timml_extraction = self.to_timml(other) + data = timml_extraction.data - def to_ttim(self, other): - _, data = self.to_timml(other) - ttim_data = self.table_to_dict(self.ttim_layer) - 
ttim_errors = self.schema.validate_ttim(ttim_data, other) - data["time"] = ttim_data["time"] - self.times = [max(data["time"])] - return ttim_errors, data + timeseries = self.table_to_dict(layer=self.ttim_layer) + errors = self.schema.validate_timeseries( + name=self.ttim_layer.name(), data=timeseries + ) + if errors: + return ExtractionResult(errors=errors) + if timeseries["time"]: + data["time"] = timeseries["time"] + times = set(timeseries["time"]) + else: + times = set() + return ExtractionResult(data=data, times=times) diff --git a/plugin/qgistim/core/elements/element.py b/plugin/qgistim/core/elements/element.py index 97e6ba8..41e11ce 100644 --- a/plugin/qgistim/core/elements/element.py +++ b/plugin/qgistim/core/elements/element.py @@ -54,7 +54,7 @@ import abc from collections import defaultdict from copy import deepcopy -from typing import Any, Dict, List, Tuple, Union +from typing import Any, Dict, List, NamedTuple, Optional, Set, Tuple, Union from PyQt5.QtWidgets import ( QDialog, @@ -73,91 +73,12 @@ ) from qgistim.core import geopackage from qgistim.core.extractor import ExtractorMixin -from qgistim.core.schemata import ( - ConsistencySchema, - IterableSchemaContainer, - SchemaContainer, -) - - -class ElementSchema(abc.ABC): - # TODO: check for presence of columns - timml_schemata: Dict[str, Union[SchemaContainer, IterableSchemaContainer]] = {} - timml_consistency_schemata: Tuple[ConsistencySchema] = () - ttim_schemata: Dict[str, Union[SchemaContainer, IterableSchemaContainer]] = {} - ttim_consistency_schemata: Tuple[ConsistencySchema] = () - timeseries_schemata: Dict[str, Union[SchemaContainer, IterableSchemaContainer]] = {} - - @staticmethod - def _validate_rows(schemata, consistency_schemata, data, other=None): - errors = defaultdict(list) - - for schema in consistency_schemata: - _error = schema.validate(data, other) - if _error: - errors["Table:"].append(_error) - for i, row in enumerate(data): - row_errors = defaultdict(list) - for variable, schema 
in schemata.items(): - _errors = schema.validate(row[variable], other) - if _errors: - row_errors[variable].extend(_errors) - if row_errors: - errors[f"Row {i + 1}:"] = row_errors - - return errors - - @staticmethod - def _validate_table(schemata, consistency_schemata, data, other=None): - if len(data) == 0: - return {"Table:": ["Table is empty."]} - - errors = defaultdict(list) - for variable, schema in schemata.items(): - _errors = schema.validate(data[variable], other) - if _errors: - errors[variable].extend(_errors) - - # The consistency schema rely on the row input being valid. - # Hence, they are tested second. - if not errors: - for schema in consistency_schemata: - _error = schema.validate(data, other) - if _error: - errors["Table:"].append(_error) - - return errors - - @classmethod - def _validate( - cls, schemata, consistency_schemata, data, other=None - ) -> Dict[str, Any]: - if isinstance(data, list): - return cls._validate_rows(schemata, consistency_schemata, data, other) - elif isinstance(data, dict): - return cls._validate_table(schemata, consistency_schemata, data, other) - else: - raise TypeError( - f"Data should be list or dict, received: {type(data).__name__}" - ) - - @classmethod - def validate_timml(cls, data, other=None): - return cls._validate( - cls.timml_schemata, cls.timml_consistency_schemata, data, other - ) - - @classmethod - def validate_ttim(cls, data, other=None): - return cls._validate( - cls.ttim_schemata, cls.ttim_consistency_schemata, data, other - ) - - @classmethod - def validate_timeseries(cls, data, other=None): - return cls._validate_table(cls.timeseries_schemata, (), data, other) +class ExtractionResult(NamedTuple): + errors: Optional[Dict[str, Any]] = None + data: Optional[Union[Dict[str, Any], List[Dict[str, Any]]]] = None + times: Optional[Set[float]] = None class NameDialog(QDialog): @@ -208,7 +129,6 @@ def _initialize_default(self, path, name): self.item = None def __init__(self, path: str, name: str): - self.times = 
[] # Store the times to get tmax for TTim. self._initialize_default(path, name) self.timml_name = f"timml {self.element_type}:{name}" @@ -329,20 +249,49 @@ def groupby(data: Dict[str, Any], key: str): grouped[groupkey][key].append(value) return grouped - def to_timml(self, other=None): - data = self.table_to_records(self.timml_layer) - errors = self.schema.validate_timml(data, other) + @staticmethod + def _check_table_columns(attributes, layer) -> Dict[str, List]: + """ + Check if any columns are missing from the table. + + In that case, abort and present an error message. + """ + fields = set(field.name() for field in layer.fields()) + missing = set(attr.name() for attr in attributes) - fields + if missing: + columns = ",".join(missing) + msg = ( + f"Table is missing columns: {columns}. " + "Remove and recreate the layer." + ) + return {"Table:": [msg]} + return {} + + def check_timml_columns(self): + return self._check_table_columns( + attributes=self.timml_attributes, layer=self.timml_layer + ) + + def to_timml(self, other=None) -> ExtractionResult: + missing = self.check_timml_columns() + if missing: + return ExtractionResult(errors=missing) + + data = self.table_to_records(layer=self.timml_layer) + errors = self.schema.validate_timml( + name=self.timml_layer.name(), data=data, other=other + ) + if errors: - return errors, data + return ExtractionResult(errors=errors) else: elements = [self.process_timml_row(row=row, other=other) for row in data] - return [], elements + return ExtractionResult(data=elements) - def to_ttim(self, other=None): - self.times = [] + def to_ttim(self, other=None) -> ExtractionResult: return self.to_timml(other) - def extract_data(self, transient: bool, other=None) -> Union[List, Dict]: + def extract_data(self, transient: bool, other=None) -> ExtractionResult: if transient: return self.to_ttim(other) else: @@ -460,7 +409,7 @@ def on_transient_changed(self, transient: bool): @staticmethod def transient_input( row, all_timeseries: 
Dict[str, Any], variable: str - ) -> Tuple[List[Any], float]: + ) -> Tuple[List[Any], Set[float]]: timeseries_id = row["timeseries_id"] row_start = row["time_start"] row_end = row["time_end"] @@ -478,38 +427,57 @@ def transient_input( return [ (time, value - steady_value) for time, value in zip(timeseries["time_start"], timeseries[variable]) - ], max(timeseries["time_start"]) + ], set(timeseries["time_start"]) elif start_and_stop: return [ (row_start, transient_value - steady_value), (row_end, 0.0), - ], row_end + ], {row_end} else: - return [(0.0, 0.0)], 0.0 + return [(0.0, 0.0)], {0.0} + + def check_ttim_columns(self): + return self._check_table_columns( + attributes=self.ttim_attributes, layer=self.ttim_layer + ) def to_ttim(self, other): - self.times = [] + missing = self.check_ttim_columns() + if missing: + return ExtractionResult(errors=missing) + + other = other.copy() # avoid side-effects timeseries = self.table_to_dict(self.ttim_layer) if timeseries: - errors = self.schema.validate_timeseries(timeseries) + errors = self.schema.validate_timeseries( + name=self.ttim_layer.name(), data=timeseries + ) if errors: - return errors, None + return ExtractionResult(errors=errors) grouped = self.groupby(timeseries, "timeseries_id") # Store timeseries in other dict for validation. 
- other = other.copy() - other["timeseries_ids"] = set(timeseries["timeseries_id"]) + other["ttim timeseries IDs"] = set(timeseries["timeseries_id"]) else: grouped = {} + other["ttim timeseries IDs"] = {None} data = self.table_to_records(self.timml_layer) - # errors = self.schema.validate_ttim(data, other) - # if errors: - # return errors, None + errors = self.schema.validate_ttim( + name=self.timml_layer.name(), data=data, other=other + ) + if errors: + return ExtractionResult(errors=errors) - elements = [self.process_ttim_row(row, grouped) for row in data] - return None, elements + elements = [] + times = set() + for row in data: + row_data, row_times = self.process_ttim_row(row, grouped) + elements.append(row_data) + times.update(row_times) + + return ExtractionResult(data=elements, times=times) class AssociatedElement(Element, abc.ABC): @@ -550,22 +518,35 @@ def remove_from_geopackage(self): geopackage.remove_layer(self.path, self.timml_name) geopackage.remove_layer(self.path, self.assoc_name) - def to_timml(self, other): + def to_timml(self, other) -> ExtractionResult: + missing = self._check_table_columns( + attributes=self.timml_attributes, layer=self.timml_layer + ) + if missing: + return ExtractionResult(errors=missing) + properties = self.table_to_dict(self.assoc_layer) - errors = self.assoc_schema.validate_timml(properties) + errors = self.assoc_schema.validate_timml( + name=self.assoc_layer.name(), + data=properties, + ) if errors: - return errors, None + return ExtractionResult(errors=errors) other = other.copy() # Avoid side-effects other["properties inhomogeneity_id"] = list(set(properties["inhomogeneity_id"])) data = self.table_to_records(self.timml_layer) - errors = self.schema.validate_timml(data, other) + errors = self.schema.validate_timml( + name=self.timml_layer.name(), + data=data, + other=other, + ) if errors: - return errors, None + return ExtractionResult(errors=errors) grouped = self.groupby(properties, "inhomogeneity_id") elements = 
[self.process_timml_row(row=row, grouped=grouped) for row in data] - return None, elements + return ExtractionResult(data=elements) def to_ttim(self, _): raise NotImplementedError(f"{type(self).__name__} is not supported in TTim.") diff --git a/plugin/qgistim/core/elements/head_line_sink.py b/plugin/qgistim/core/elements/head_line_sink.py index 2e2fbbd..1bdf709 100644 --- a/plugin/qgistim/core/elements/head_line_sink.py +++ b/plugin/qgistim/core/elements/head_line_sink.py @@ -1,7 +1,8 @@ from PyQt5.QtCore import QVariant from qgis.core import QgsDefaultValue, QgsField, QgsSingleSymbolRenderer from qgistim.core.elements.colors import BLUE -from qgistim.core.elements.element import ElementSchema, TransientElement +from qgistim.core.elements.element import TransientElement +from qgistim.core.elements.schemata import RowWiseSchema from qgistim.core.schemata import ( AllOrNone, AllRequired, @@ -13,8 +14,9 @@ ) -class HeadLineSinkSchema(ElementSchema): +class HeadLineSinkSchema(RowWiseSchema): timml_schemata = { + "geometry": Required(), "head": Required(), "resistance": Required(Positive()), "width": Required(Positive()), @@ -28,6 +30,7 @@ class HeadLineSinkSchema(ElementSchema): ttim_schemata = { "time_start": Optional(Positive()), "time_end": Optional(Positive()), + "timeseries_id": Optional(Membership("ttim timeseries IDs")), } timeseries_schemata = { "timeseries_id": AllRequired(), @@ -83,8 +86,7 @@ def process_timml_row(self, row, other=None): } def process_ttim_row(self, row, grouped): - tsandh, tmax = self.transient_input(row, grouped, "head") - self.times.append(tmax) + tsandh, times = self.transient_input(row, grouped, "head") return { "xy": self.linestring_xy(row), "tsandh": tsandh, @@ -92,4 +94,4 @@ def process_ttim_row(self, row, grouped): "wh": row["width"], "layers": row["layer"], "label": row["label"], - } + }, times diff --git a/plugin/qgistim/core/elements/headwell.py b/plugin/qgistim/core/elements/headwell.py index 1f4cbe1..22f0e82 100644 --- 
a/plugin/qgistim/core/elements/headwell.py +++ b/plugin/qgistim/core/elements/headwell.py @@ -10,24 +10,32 @@ QgsSingleSymbolRenderer, ) from qgistim.core.elements.colors import BLUE -from qgistim.core.elements.element import ElementSchema, TransientElement +from qgistim.core.elements.element import TransientElement +from qgistim.core.elements.schemata import RowWiseSchema from qgistim.core.schemata import ( AllOrNone, AllRequired, Membership, NotBoth, + Optional, Positive, Required, ) -class HeadWellSchema(ElementSchema): +class HeadWellSchema(RowWiseSchema): timml_schemata = { + "geometry": Required(), "head": Required(), "radius": Required(Positive()), "resistance": Required(Positive()), "layer": Required(Membership("aquifer layers")), } + ttim_schemata = { + "time_start": Optional(Positive()), + "time_end": Optional(Positive()), + "timeseries_id": Optional(Membership("ttim timeseries IDs")), + } ttim_consistency_schemata = ( AllOrNone(("time_start", "time_end", "head_transient")), NotBoth("time_start", "timeseries_id"), @@ -88,8 +96,7 @@ def process_timml_row(self, row, other=None) -> Dict[str, Any]: def process_ttim_row(self, row, grouped): x, y = self.point_xy(row) - tsandh, tmax = self.transient_input(row, grouped, "head") - self.times.append(tmax) + tsandh, times = self.transient_input(row, grouped, "head") return { "xw": x, "yw": y, @@ -98,7 +105,7 @@ def process_ttim_row(self, row, grouped): "res": row["resistance"], "layers": row["layer"], "label": row["label"], - } + }, times class RemoteHeadWell(HeadWell): diff --git a/plugin/qgistim/core/elements/impermeable_line_doublet.py b/plugin/qgistim/core/elements/impermeable_line_doublet.py index c4383bf..a61b9d8 100644 --- a/plugin/qgistim/core/elements/impermeable_line_doublet.py +++ b/plugin/qgistim/core/elements/impermeable_line_doublet.py @@ -1,12 +1,14 @@ from PyQt5.QtCore import QVariant from qgis.core import QgsDefaultValue, QgsField, QgsSingleSymbolRenderer from qgistim.core.elements.colors import RED 
-from qgistim.core.elements.element import Element, ElementSchema +from qgistim.core.elements.element import Element +from qgistim.core.elements.schemata import RowWiseSchema from qgistim.core.schemata import Membership, Positive, Required -class ImpermeableLineDoubletSchema(ElementSchema): +class ImpermeableLineDoubletSchema(RowWiseSchema): timml_schemata = { + "geometry": Required(), "order": Required(Positive()), "layer": Required(Membership("aquifer layers")), } diff --git a/plugin/qgistim/core/elements/leaky_building_pit.py b/plugin/qgistim/core/elements/leaky_building_pit.py index 3463826..a4be992 100644 --- a/plugin/qgistim/core/elements/leaky_building_pit.py +++ b/plugin/qgistim/core/elements/leaky_building_pit.py @@ -3,7 +3,8 @@ from PyQt5.QtCore import QVariant from qgis.core import QgsDefaultValue, QgsField, QgsSingleSymbolRenderer from qgistim.core.elements.colors import RED, TRANSPARENT_RED -from qgistim.core.elements.element import AssociatedElement, ElementSchema +from qgistim.core.elements.element import AssociatedElement +from qgistim.core.elements.schemata import RowWiseSchema, TableSchema from qgistim.core.schemata import ( AllGreaterEqual, AllRequired, @@ -20,15 +21,16 @@ ) -class LeakyBuildingPitSchema(ElementSchema): +class LeakyBuildingPitSchema(RowWiseSchema): timml_schemata = { + "geometry": Required(), "inhomogeneity_id": Required(Membership("properties inhomogeneity_id")), "order": Required(Positive()), "ndegrees": Required(Positive()), } -class AssociatedLeakyBuildingPitchema(ElementSchema): +class AssociatedLeakyBuildingPitchema(TableSchema): timml_schemata = { "inhomogeneity_id": AllRequired(), "layer": AllRequired(Range()), diff --git a/plugin/qgistim/core/elements/leaky_line_doublet.py b/plugin/qgistim/core/elements/leaky_line_doublet.py index acfc375..936c8f4 100644 --- a/plugin/qgistim/core/elements/leaky_line_doublet.py +++ b/plugin/qgistim/core/elements/leaky_line_doublet.py @@ -1,12 +1,14 @@ from PyQt5.QtCore import QVariant 
from qgis.core import QgsDefaultValue, QgsField, QgsSingleSymbolRenderer from qgistim.core.elements.colors import RED -from qgistim.core.elements.element import Element, ElementSchema +from qgistim.core.elements.element import Element +from qgistim.core.elements.schemata import RowWiseSchema from qgistim.core.schemata import Membership, Positive, Required -class LeakyLineDoubletSchema(ElementSchema): +class LeakyLineDoubletSchema(RowWiseSchema): timml_schemata = { + "geometry": Required(), "resistance": Required(Positive()), "order": Required(Positive()), "layer": Required(Membership("aquifer layers")), diff --git a/plugin/qgistim/core/elements/line_sink_ditch.py b/plugin/qgistim/core/elements/line_sink_ditch.py index a50a963..83d8daa 100644 --- a/plugin/qgistim/core/elements/line_sink_ditch.py +++ b/plugin/qgistim/core/elements/line_sink_ditch.py @@ -1,7 +1,8 @@ from PyQt5.QtCore import QVariant from qgis.core import QgsDefaultValue, QgsField, QgsSingleSymbolRenderer from qgistim.core.elements.colors import GREEN -from qgistim.core.elements.element import ElementSchema, TransientElement +from qgistim.core.elements.element import TransientElement +from qgistim.core.elements.schemata import RowWiseSchema from qgistim.core.schemata import ( AllOrNone, AllRequired, @@ -13,8 +14,9 @@ ) -class LineSinkDitchSchema(ElementSchema): +class LineSinkDitchSchema(RowWiseSchema): timml_schemata = { + "geometry": Required(), "discharge": Required(), "resistance": Required(Positive()), "width": Required(Positive()), @@ -28,6 +30,7 @@ class LineSinkDitchSchema(ElementSchema): ttim_schemata = { "time_start": Optional(Positive()), "time_end": Optional(Positive()), + "timeseries_id": Optional(Membership("ttim timeseries IDs")), } timeseries_schemata = { "timeseries_id": AllRequired(), @@ -83,8 +86,7 @@ def process_timml_row(self, row, other=None): } def process_ttim_row(self, row, grouped): - tsandQ, tmax = self.transient_input(row, grouped, "discharge") - self.times.append(tmax) + 
tsandQ, times = self.transient_input(row, grouped, "discharge") return { "xy": self.linestring_xy(row), "tsandQ": tsandQ, @@ -92,4 +94,4 @@ def process_ttim_row(self, row, grouped): "wh": row["width"], "layers": row["layer"], "label": row["label"], - } + }, times diff --git a/plugin/qgistim/core/elements/observation.py b/plugin/qgistim/core/elements/observation.py index 48b3603..7fefaef 100644 --- a/plugin/qgistim/core/elements/observation.py +++ b/plugin/qgistim/core/elements/observation.py @@ -1,11 +1,12 @@ from PyQt5.QtCore import QVariant from qgis.core import QgsDefaultValue, QgsField, QgsSingleSymbolRenderer from qgistim.core.elements.colors import LIGHT_BLUE -from qgistim.core.elements.element import ElementSchema, TransientElement +from qgistim.core.elements.element import TransientElement +from qgistim.core.elements.schemata import RowWiseSchema from qgistim.core.schemata import AllRequired, Positive, Required -class HeadObservationSchema(ElementSchema): +class HeadObservationSchema(RowWiseSchema): timml_schemata = { "geometry": Required(), } @@ -49,9 +50,10 @@ def process_timml_row(self, row, other=None): def process_ttim_row(self, row, grouped): x, y = self.point_xy(row) + times = grouped[row["timeseries_id"]]["time"] return { "x": x, "y": y, - "t": grouped[row["timeseries_id"]]["time"], + "t": times, "label": row["label"], - } + }, times diff --git a/plugin/qgistim/core/elements/polygon_area_sink.py b/plugin/qgistim/core/elements/polygon_area_sink.py index 0c3757a..47e016f 100644 --- a/plugin/qgistim/core/elements/polygon_area_sink.py +++ b/plugin/qgistim/core/elements/polygon_area_sink.py @@ -3,12 +3,14 @@ from PyQt5.QtCore import QVariant from qgis.core import QgsDefaultValue, QgsField, QgsSingleSymbolRenderer from qgistim.core.elements.colors import GREEN, TRANSPARENT_GREEN -from qgistim.core.elements.element import Element, ElementSchema +from qgistim.core.elements.element import Element +from qgistim.core.elements.schemata import RowWiseSchema from 
qgistim.core.schemata import Positive, Required -class PolygonAreaSinkSchema(ElementSchema): +class PolygonAreaSinkSchema(RowWiseSchema): timml_schemata = { + "geometry": Required(), "rate": Required(), "order": Required(Positive()), "ndegrees": Required(Positive()), diff --git a/plugin/qgistim/core/elements/polygon_inhomogeneity.py b/plugin/qgistim/core/elements/polygon_inhomogeneity.py index 55cd9c6..4b7bddd 100644 --- a/plugin/qgistim/core/elements/polygon_inhomogeneity.py +++ b/plugin/qgistim/core/elements/polygon_inhomogeneity.py @@ -3,12 +3,12 @@ from PyQt5.QtCore import QVariant from qgis.core import QgsDefaultValue, QgsField, QgsSingleSymbolRenderer from qgistim.core.elements.colors import GREY, TRANSPARENT_GREY -from qgistim.core.elements.element import AssociatedElement, ElementSchema +from qgistim.core.elements.element import AssociatedElement +from qgistim.core.elements.schemata import RowWiseSchema, TableSchema from qgistim.core.schemata import ( AllGreaterEqual, AllRequired, Membership, - NotBoth, OffsetAllRequired, OptionalFirstOnly, Positive, @@ -19,15 +19,16 @@ ) -class PolygonInhomogeneitySchema(ElementSchema): +class PolygonInhomogeneitySchema(RowWiseSchema): timml_schemata = { + "geometry": Required(), "inhomogeneity_id": Required(Membership("properties inhomogeneity_id")), "order": Required(Positive()), "ndegrees": Required(Positive()), } -class AssociatedPolygonInhomogeneitySchema(ElementSchema): +class AssociatedPolygonInhomogeneitySchema(TableSchema): timml_schemata = { "inhomogeneity_id": AllRequired(), "layer": AllRequired(Range()), diff --git a/plugin/qgistim/core/elements/polygon_semi_confined_top.py b/plugin/qgistim/core/elements/polygon_semi_confined_top.py index 0d81dd2..f5aa018 100644 --- a/plugin/qgistim/core/elements/polygon_semi_confined_top.py +++ b/plugin/qgistim/core/elements/polygon_semi_confined_top.py @@ -3,12 +3,14 @@ from PyQt5.QtCore import QVariant from qgis.core import QgsDefaultValue, QgsField, QgsSingleSymbolRenderer 
from qgistim.core.elements.colors import BLUE, TRANSPARENT_BLUE -from qgistim.core.elements.element import Element, ElementSchema +from qgistim.core.elements.element import Element +from qgistim.core.elements.schemata import RowWiseSchema from qgistim.core.schemata import Positive, Required -class PolygonSemiConfinedTopSchema(ElementSchema): +class PolygonSemiConfinedTopSchema(RowWiseSchema): timml_schemata = { + "geometry": Required(), "aquitard_c": Required(Positive()), "semiconf_top": Required(), "semiconf_head": Required(), diff --git a/plugin/qgistim/core/elements/schemata.py b/plugin/qgistim/core/elements/schemata.py new file mode 100644 index 0000000..b46aba0 --- /dev/null +++ b/plugin/qgistim/core/elements/schemata.py @@ -0,0 +1,134 @@ +import abc +from collections import defaultdict +from typing import Any, Dict, List, NamedTuple, Optional, Tuple, Union + +from qgistim.core.schemata import ( + ConsistencySchema, + IterableSchemaContainer, + SchemaContainer, +) + + +class ValidationData(NamedTuple): + schemata: Dict[str, SchemaContainer] + consistency_schemata: Tuple[ConsistencySchema] + name: str + data: Dict[str, Any] + other: Optional[Dict[str, Any]] = None + + +class SchemaBase(abc.ABC): + # TODO: check for presence of columns + timml_schemata: Dict[str, Union[SchemaContainer, IterableSchemaContainer]] = {} + timml_consistency_schemata: Tuple[ConsistencySchema] = () + ttim_schemata: Dict[str, Union[SchemaContainer, IterableSchemaContainer]] = {} + ttim_consistency_schemata: Tuple[ConsistencySchema] = () + timeseries_schemata: Dict[str, Union[SchemaContainer, IterableSchemaContainer]] = {} + + @staticmethod + def _validate_table(vd: ValidationData) -> Dict[str, List]: + errors = defaultdict(list) + for variable, schema in vd.schemata.items(): + _errors = schema.validate(vd.data[variable], vd.other) + if _errors: + errors[variable].extend(_errors) + + # The consistency schema rely on the row input being valid. + # Hence, they are tested second. 
+ if not errors: + for schema in vd.consistency_schemata: + _error = schema.validate(vd.data, vd.other) + if _error: + errors["Table:"].append(_error) + + return errors + + @classmethod + def validate_timeseries( + cls, name: str, data: Dict[str, Any], other=None + ) -> Dict[str, List]: + vd = ValidationData(cls.timeseries_schemata, (), name, data, other) + return cls._validate_table(vd) + + @classmethod + def validate_timml( + cls, name: str, data: Dict[str, Any], other=None + ) -> Dict[str, List]: + vd = ValidationData( + cls.timml_schemata, cls.timml_consistency_schemata, name, data, other + ) + return cls._validate(vd) + + @classmethod + def validate_ttim( + cls, name: str, data: Dict[str, Any], other=None + ) -> Dict[str, List]: + vd = ValidationData( + cls.ttim_schemata, cls.ttim_consistency_schemata, name, data, other + ) + return cls._validate(vd) + + @abc.abstractclassmethod + def _validate(vd: ValidationData) -> Dict[str, List]: + pass + + +class TableSchema(SchemaBase, abc.ABC): + """ + Schema for Tabular data, such as Aquifer properties. + """ + + @classmethod + def _validate( + cls, + vd: ValidationData, + ) -> Dict[str, List]: + return cls._validate_table(vd) + + +class RowWiseSchema(SchemaBase, abc.ABC): + """ + Schema for entries that should be validated row-by-row, such as Wells. + """ + + @staticmethod + def _validate(vd: ValidationData) -> Dict[str, List]: + errors = defaultdict(list) + + for i, row in enumerate(vd.data): + row_errors = defaultdict(list) + + for variable, schema in vd.schemata.items(): + _errors = schema.validate(row[variable], vd.other) + if _errors: + row_errors[variable].extend(_errors) + + # Skip consistency tests if the individual values are not good. 
+ if not row_errors: + for schema in vd.consistency_schemata: + _error = schema.validate(row, vd.other) + if _error: + row_errors["Row:"].append(_error) + + if row_errors: + errors[f"Row {i + 1}:"] = row_errors + + return errors + + +class SingleRowSchema(RowWiseSchema, abc.ABC): + """ + Schema for entries that should contain only one row, which should be + validated as a row, such as Constant, Domain, Uniform Flow. + """ + + @staticmethod + def _validate(vd: ValidationData) -> Dict[str, List]: + nrow = len(vd.data) + if nrow > 1: + return { + vd.name: [ + "Table must contain a single row. " f"Table contains {nrow} rows." + ] + } + return RowWiseSchema._validate(vd) diff --git a/plugin/qgistim/core/elements/uniform_flow.py b/plugin/qgistim/core/elements/uniform_flow.py index 571fed1..6993f3a 100644 --- a/plugin/qgistim/core/elements/uniform_flow.py +++ b/plugin/qgistim/core/elements/uniform_flow.py @@ -1,10 +1,11 @@ from PyQt5.QtCore import QVariant from qgis.core import QgsField -from qgistim.core.elements.element import Element, ElementSchema +from qgistim.core.elements.element import Element +from qgistim.core.elements.schemata import SingleRowSchema from qgistim.core.schemata import Required -class UniformFlowSchema(ElementSchema): +class UniformFlowSchema(SingleRowSchema): timml_schemata = { "slope": Required(), "angle": Required(), diff --git a/plugin/qgistim/core/elements/well.py b/plugin/qgistim/core/elements/well.py index 2029e87..46aee89 100644 --- a/plugin/qgistim/core/elements/well.py +++ b/plugin/qgistim/core/elements/well.py @@ -3,7 +3,8 @@ from PyQt5.QtCore import QVariant from qgis.core import QgsDefaultValue, QgsField, QgsSingleSymbolRenderer from qgistim.core.elements.colors import GREEN -from qgistim.core.elements.element import ElementSchema, TransientElement +from qgistim.core.elements.element import TransientElement +from qgistim.core.elements.schemata import RowWiseSchema from qgistim.core.schemata import ( AllOrNone, AllRequired, @@ -15,7 
+16,7 @@ ) -class WellSchema(ElementSchema): +class WellSchema(RowWiseSchema): timml_schemata = { "geometry": Required(), "discharge": Required(), @@ -28,9 +29,9 @@ class WellSchema(ElementSchema): "slug": Required(), "time_start": Optional(Positive()), "time_end": Optional(Positive()), - "timeseries_id": Optional(Membership("timeseries_ids")), + "timeseries_id": Optional(Membership("ttim timeseries IDs")), } - consistency_schemata = ( + ttim_consistency_schemata = ( AllOrNone(("time_start", "time_end", "discharge_transient")), NotBoth("time_start", "timeseries_id"), ) @@ -96,8 +97,7 @@ def process_timml_row(self, row, other=None) -> Dict[str, Any]: def process_ttim_row(self, row, grouped): x, y = self.point_xy(row) - tsandQ, tmax = self.transient_input(row, grouped, "discharge") - self.times.append(tmax) + tsandQ, times = self.transient_input(row, grouped, "discharge") return { "xw": x, "yw": y, @@ -108,4 +108,4 @@ def process_ttim_row(self, row, grouped): "label": row["label"], "rc": row["caisson_radius"], "wbstype": "slug" if row["slug"] else "pumping", - } + }, times diff --git a/plugin/qgistim/core/schemata.py b/plugin/qgistim/core/schemata.py index 162a7d4..022add2 100644 --- a/plugin/qgistim/core/schemata.py +++ b/plugin/qgistim/core/schemata.py @@ -181,7 +181,7 @@ def __init__(self, *variables: Sequence[str]): self.variables = variables def validate(self, data, _=None) -> MaybeError: - present = [data[v] is not None for v in self.variables] + present = [data.get(v) is not None for v in self.variables] if any(present) != all(present): vars = ", ".join(self.variables) return ( @@ -197,7 +197,7 @@ def __init__(self, x: str, y: str): self.y = y def validate(self, data, _=None) -> MaybeError: - if (data[self.x] is not None) and (data[self.y] is not None): + if (data.get(self.x) is not None) and (data.get(self.y) is not None): return f"Either {self.x} or {self.y} should be provided, not both." 
return None @@ -331,6 +331,7 @@ def validate(self, data, _=None) -> MaybeError: class SingleRow(ConsistencySchema): def validate(self, data, _=None) -> MaybeError: - if len(data) != 1: - return "Table may contain only one row." + nrow = len(data) + if nrow != 1: + return f"Table must contain one row, found {nrow} rows." return None diff --git a/plugin/qgistim/widgets/compute_widget.py b/plugin/qgistim/widgets/compute_widget.py index d88012f..5d7efcd 100644 --- a/plugin/qgistim/widgets/compute_widget.py +++ b/plugin/qgistim/widgets/compute_widget.py @@ -247,12 +247,16 @@ def redraw_contours(self) -> None: layer = self.contour_layer.currentLayer() if layer is None: return + + start, stop, step = self.contour_range() + if (start == stop) or (step == 0.0): + return + renderer = layer.rendererSettings() index = renderer.activeScalarDatasetGroup() qgs_index = QgsMeshDatasetIndex(group=index, dataset=0) name = layer.datasetGroupMetadata(qgs_index).name() contours_name = f"{path.stem}-contours-{name}" - start, stop, step = self.contour_range() gpkg_path = str(path.with_suffix(".output.gpkg")) layer = mesh_contours( @@ -407,6 +411,9 @@ def load_mesh_result(self, path: Union[Path, str], load_contours: bool) -> None: start = scalar_settings.classificationMinimum() stop = scalar_settings.classificationMaximum() step = (stop - start) / 21 + # If no head differences are present, no contours can be drawn. 
+ if step == 0.0: + return contour_layer = mesh_contours( gpkg_path=str(path.with_suffix(".output.gpkg")), diff --git a/plugin/qgistim/widgets/dataset_widget.py b/plugin/qgistim/widgets/dataset_widget.py index 3caf5ef..3eeb7bb 100644 --- a/plugin/qgistim/widgets/dataset_widget.py +++ b/plugin/qgistim/widgets/dataset_widget.py @@ -13,7 +13,7 @@ import json from pathlib import Path from shutil import copy -from typing import Any, Dict, List, Set, Tuple +from typing import Any, Dict, List, NamedTuple, Set, Tuple from PyQt5.QtCore import Qt from PyQt5.QtWidgets import ( @@ -55,6 +55,12 @@ ) +class Extraction(NamedTuple): + timml: Dict[str, Any] = None + ttim: Dict[str, Any] = None + success: bool = True + + class DatasetTreeWidget(QTreeWidget): def __init__(self, parent=None): super().__init__(parent) @@ -212,23 +218,29 @@ def extract_data(self, transient: bool) -> Tuple[Dict[str, Any], Dict[str, Any]] # other elements. name = "timml Aquifer:Aquifer" aquifer = elements.pop(name) - _errors, raw_data = aquifer.extract_data(transient) - if _errors: - errors[name] = _errors - else: - aquifer_data = aquifer.aquifer_data(raw_data, transient=transient) - data[name] = aquifer_data - if transient: - data["reference_date"] = str(raw_data["reference_date"].toPyDateTime()) + aquifer_extraction = aquifer.extract_data(transient) + if aquifer_extraction.errors: + errors[name] = aquifer_extraction.errors + return errors, None + + raw_data = aquifer_extraction.data + aquifer_data = aquifer.aquifer_data(raw_data, transient=transient) + data[name] = aquifer_data + if transient: + data["reference_date"] = str(raw_data["reference_date"].toPyDateTime()) + times = set() other = {"aquifer layers": raw_data["layer"], "global_aquifer": raw_data} for name, element in elements.items(): + print(name) try: - _errors, _data = element.extract_data(transient, other) - if _errors: - errors[name] = _errors - elif _data: # skip empty tables - data[name] = _data + extraction = 
element.extract_data(transient, other) + if extraction.errors: + errors[name] = extraction.errors + elif extraction.data: # skip empty tables + data[name] = extraction.data + if extraction.times: + times.update(extraction.times) except RuntimeError as e: if ( e.args[0] @@ -239,13 +251,11 @@ def extract_data(self, transient: bool) -> Tuple[Dict[str, Any], Dict[str, Any]] else: raise e - # Collect all the times. They have been set in the element while - # converting. if transient: - times = [] - for element in elements.values(): - times.extend(element.times) - data["timml Aquifer:Aquifer"]["tmax"] = max(times) + if times: + data["timml Aquifer:Aquifer"]["tmax"] = max(times) + else: + errors["Model"] = {"TTim input:": ["No transient forcing defined."]} return errors, data @@ -474,7 +484,7 @@ def set_interpreter_interaction(self, value: bool): self.parent.set_interpreter_interaction(value) return - def extract_data(self, transient: bool) -> Tuple[Dict[str, Any], Dict[str, Any]]: + def _extract_data(self, transient: bool) -> Extraction: if self.validation_dialog: self.validation_dialog.close() self.validation_dialog = None @@ -482,16 +492,16 @@ def extract_data(self, transient: bool) -> Tuple[Dict[str, Any], Dict[str, Any]] errors, timml_data = self.dataset_tree.extract_data(transient=False) if errors: self.validation_dialog = ValidationDialog(errors) - return None, None + return Extraction(success=False) ttim_data = None if transient: errors, ttim_data = self.dataset_tree.extract_data(transient=True) if errors: self.validation_dialog = ValidationDialog(errors) - return None, None + return Extraction(success=False) - return timml_data, ttim_data + return Extraction(timml=timml_data, ttim=ttim_data) def convert_to_python(self) -> None: transient = self.transient @@ -499,11 +509,11 @@ def convert_to_python(self) -> None: if outpath == "": # Empty string in case of cancel button press return - timml_data, ttim_data = self.extract_data(transient=transient) - if timml_data 
is None: + extraction = self._extract_data(transient=transient) + if not extraction.success: return - script = data_to_script(timml_data, ttim_data) + script = data_to_script(extraction.timml, extraction.ttim) with open(outpath, "w") as f: f.write(script) @@ -536,13 +546,13 @@ def convert_to_json( invalid_input: bool Whether validation has succeeded. """ - timml_data, ttim_data = self.extract_data(transient=transient) - if timml_data is None: + extraction = self._extract_data(transient=transient) + if not extraction.success: return True json_data = data_to_json( - timml_data, - ttim_data, + extraction.timml, + extraction.ttim, cellsize=cellsize, output_options=output_options, ) From 02a01840007891055f137d926bd46b65ce5b39bc Mon Sep 17 00:00:00 2001 From: Huite Bootsma Date: Fri, 27 Oct 2023 12:34:38 +0200 Subject: [PATCH 38/41] Rename Extraction result to more specific ElementExtraction --- plugin/qgistim/core/elements/aquifer.py | 18 +++++++------- plugin/qgistim/core/elements/domain.py | 14 +++++------ plugin/qgistim/core/elements/element.py | 32 ++++++++++++------------- 3 files changed, 32 insertions(+), 32 deletions(-) diff --git a/plugin/qgistim/core/elements/aquifer.py b/plugin/qgistim/core/elements/aquifer.py index 8571c13..35fd052 100644 --- a/plugin/qgistim/core/elements/aquifer.py +++ b/plugin/qgistim/core/elements/aquifer.py @@ -1,7 +1,7 @@ from PyQt5.QtCore import QVariant from qgis.core import QgsDefaultValue, QgsField from qgistim.core import geopackage -from qgistim.core.elements.element import ExtractionResult, TransientElement +from qgistim.core.elements.element import ElementExtraction, TransientElement from qgistim.core.elements.schemata import SingleRowSchema, TableSchema from qgistim.core.schemata import ( AllGreaterEqual, @@ -96,19 +96,19 @@ def remove_from_geopackage(self): """This element may not be removed.""" return - def to_timml(self) -> ExtractionResult: + def to_timml(self) -> ElementExtraction: missing = self.check_timml_columns() if 
missing: - return ExtractionResult(errors=missing) + return ElementExtraction(errors=missing) data = self.table_to_dict(layer=self.timml_layer) errors = self.schema.validate_timml(name=self.timml_layer.name(), data=data) - return ExtractionResult(errors=errors, data=data) + return ElementExtraction(errors=errors, data=data) - def to_ttim(self) -> ExtractionResult: + def to_ttim(self) -> ElementExtraction: missing = self.check_ttim_columns() if missing: - return ExtractionResult(errors=missing) + return ElementExtraction(errors=missing) data = self.table_to_dict(layer=self.timml_layer) time_data = self.table_to_records(layer=self.ttim_layer) @@ -119,10 +119,10 @@ def to_ttim(self) -> ExtractionResult: ), } if errors: - return ExtractionResult(errors=errors) - return ExtractionResult(data={**data, **time_data[0]}) + return ElementExtraction(errors=errors) + return ElementExtraction(data={**data, **time_data[0]}) - def extract_data(self, transient: bool) -> ExtractionResult: + def extract_data(self, transient: bool) -> ElementExtraction: if transient: return self.to_ttim() else: diff --git a/plugin/qgistim/core/elements/domain.py b/plugin/qgistim/core/elements/domain.py index 6c40cc7..9149c60 100644 --- a/plugin/qgistim/core/elements/domain.py +++ b/plugin/qgistim/core/elements/domain.py @@ -9,7 +9,7 @@ QgsSingleSymbolRenderer, ) from qgistim.core.elements.colors import BLACK -from qgistim.core.elements.element import ExtractionResult, TransientElement +from qgistim.core.elements.element import ElementExtraction, TransientElement from qgistim.core.elements.schemata import SingleRowSchema from qgistim.core.schemata import AllRequired, Positive, Required, StrictlyIncreasing @@ -65,17 +65,17 @@ def update_extent(self, iface: Any) -> Tuple[float, float]: canvas.refresh() return ymax, ymin - def to_timml(self, other) -> ExtractionResult: + def to_timml(self, other) -> ElementExtraction: data = self.table_to_records(layer=self.timml_layer) errors = 
self.schema.validate_timml( name=self.timml_layer.name(), data=data, other=other ) if errors: - return ExtractionResult(errors=errors) + return ElementExtraction(errors=errors) else: x = [point[0] for point in data[0]["geometry"]] y = [point[1] for point in data[0]["geometry"]] - return ExtractionResult( + return ElementExtraction( data={ "xmin": min(x), "xmax": max(x), @@ -84,7 +84,7 @@ def to_timml(self, other) -> ExtractionResult: } ) - def to_ttim(self, other) -> ExtractionResult: + def to_ttim(self, other) -> ElementExtraction: timml_extraction = self.to_timml(other) data = timml_extraction.data @@ -93,10 +93,10 @@ def to_ttim(self, other) -> ExtractionResult: name=self.ttim_layer.name(), data=timeseries ) if errors: - return ExtractionResult(errors=errors) + return ElementExtraction(errors=errors) if timeseries["time"]: data["time"] = timeseries["time"] times = set(timeseries["time"]) else: times = set() - return ExtractionResult(data=data, times=times) + return ElementExtraction(data=data, times=times) diff --git a/plugin/qgistim/core/elements/element.py b/plugin/qgistim/core/elements/element.py index 41e11ce..63227ff 100644 --- a/plugin/qgistim/core/elements/element.py +++ b/plugin/qgistim/core/elements/element.py @@ -75,7 +75,7 @@ from qgistim.core.extractor import ExtractorMixin -class ExtractionResult(NamedTuple): +class ElementExtraction(NamedTuple): errors: Optional[Dict[str, Any]] = None data: Optional[Union[Dict[str, Any], List[Dict[str, Any]]]] = None times: Optional[Set[float]] = None @@ -272,10 +272,10 @@ def check_timml_columns(self): attributes=self.timml_attributes, layer=self.timml_layer ) - def to_timml(self, other=None) -> ExtractionResult: + def to_timml(self, other=None) -> ElementExtraction: missing = self.check_timml_columns() if missing: - return ExtractionResult(errors=missing) + return ElementExtraction(errors=missing) data = self.table_to_records(layer=self.timml_layer) errors = self.schema.validate_timml( @@ -283,15 +283,15 @@ def 
to_timml(self, other=None) -> ExtractionResult: ) if errors: - return ExtractionResult(errors=errors) + return ElementExtraction(errors=errors) else: elements = [self.process_timml_row(row=row, other=other) for row in data] - return ExtractionResult(data=elements) + return ElementExtraction(data=elements) - def to_ttim(self, other=None) -> ExtractionResult: + def to_ttim(self, other=None) -> ElementExtraction: return self.to_timml(other) - def extract_data(self, transient: bool, other=None) -> ExtractionResult: + def extract_data(self, transient: bool, other=None) -> ElementExtraction: if transient: return self.to_ttim(other) else: @@ -444,7 +444,7 @@ def check_ttim_columns(self): def to_ttim(self, other): missing = self.check_ttim_columns() if missing: - return ExtractionResult(errors=missing) + return ElementExtraction(errors=missing) other = other.copy() # avoid side-effects timeseries = self.table_to_dict(self.ttim_layer) @@ -453,7 +453,7 @@ def to_ttim(self, other): name=self.ttim_layer.name(), data=timeseries ) if errors: - return ExtractionResult(errors=errors) + return ElementExtraction(errors=errors) grouped = self.groupby(timeseries, "timeseries_id") # Store timeseries in other dict for validation. 
@@ -468,7 +468,7 @@ def to_ttim(self, other): name=self.timml_layer.name(), data=data, other=other ) if errors: - return ExtractionResult(errors=errors) + return ElementExtraction(errors=errors) elements = [] times = set() @@ -477,7 +477,7 @@ def to_ttim(self, other): elements.append(row_data) times.update(row_times) - return ExtractionResult(data=elements, times=times) + return ElementExtraction(data=elements, times=times) class AssociatedElement(Element, abc.ABC): @@ -518,12 +518,12 @@ def remove_from_geopackage(self): geopackage.remove_layer(self.path, self.timml_name) geopackage.remove_layer(self.path, self.assoc_name) - def to_timml(self, other) -> ExtractionResult: + def to_timml(self, other) -> ElementExtraction: missing = self._check_table_columns( attributes=self.timml_attributes, layer=self.timml_layer ) if missing: - return ExtractionResult(errors=missing) + return ElementExtraction(errors=missing) properties = self.table_to_dict(self.assoc_layer) errors = self.assoc_schema.validate_timml( @@ -531,7 +531,7 @@ def to_timml(self, other) -> ExtractionResult: data=properties, ) if errors: - return ExtractionResult(errors=errors) + return ElementExtraction(errors=errors) other = other.copy() # Avoid side-effects other["properties inhomogeneity_id"] = list(set(properties["inhomogeneity_id"])) @@ -542,11 +542,11 @@ def to_timml(self, other) -> ExtractionResult: other=other, ) if errors: - return ExtractionResult(errors=errors) + return ElementExtraction(errors=errors) grouped = self.groupby(properties, "inhomogeneity_id") elements = [self.process_timml_row(row=row, grouped=grouped) for row in data] - return ExtractionResult(data=elements) + return ElementExtraction(data=elements) def to_ttim(self, _): raise NotImplementedError(f"{type(self).__name__} is not supported in TTim.") From 451579b5b16fdb802d50d5387abb17f230244641 Mon Sep 17 00:00:00 2001 From: Huite Bootsma Date: Fri, 27 Oct 2023 12:36:35 +0200 Subject: [PATCH 39/41] Remove tmp script --- example.py | 
47 ----------------------------------------------- 1 file changed, 47 deletions(-) delete mode 100644 example.py diff --git a/example.py b/example.py deleted file mode 100644 index 9ff1670..0000000 --- a/example.py +++ /dev/null @@ -1,47 +0,0 @@ -# %% -import gistim -import os - -# %% - -os.chdir("c:/tmp/timtests") -gistim.compute_steady(path="test3.json") - -# %% - -import numpy as np -import timml - -# %% - - -t_0 = 0.0 -t_1 = 10.0 -s_l = 0.7 -s_u = 0.3 -f_amp = 1.25 - -s_scaled = (f_amp - 1.0) * (t_1 - t_0) -L_sto = (s_scaled + s_l + s_u) / 2 -# %% - -count_L = np.round(np.log(L_sto / s_l) / np.log(f_amp)) -count_U = np.round(np.log(L_sto / s_u) / np.log(f_amp)) -# %% -count_L = max(np.round(count_L).astype(int) , 0) -count_U = max(np.round(count_U).astype(int) , 0) -# %% - -L_lower = f_amp ** (-count_L) -L_upper = f_amp ** (-count_U) -# %% -L_sto2 = s_scaled / (2 - L_lower - L_upper) -# %% - -L_lower = L_lower * L_sto2 -L_upper = L_upper * L_sto2 -# %% - -lower_dt = L_lower * (f_amp ** np.arange(count_L)) -upper_dt = L_upper * (f_amp ** np.arange(count_U))[::-1] -# %% From 5a0144edb8396384b0e7254482c7f125001db4ce Mon Sep 17 00:00:00 2001 From: Huite Bootsma Date: Fri, 27 Oct 2023 15:07:10 +0200 Subject: [PATCH 40/41] Remove dummy ugrid --- plugin/qgistim/core/dummy_ugrid.py | 41 ------------------------ plugin/qgistim/widgets/compute_widget.py | 5 +-- 2 files changed, 1 insertion(+), 45 deletions(-) delete mode 100644 plugin/qgistim/core/dummy_ugrid.py diff --git a/plugin/qgistim/core/dummy_ugrid.py b/plugin/qgistim/core/dummy_ugrid.py deleted file mode 100644 index e17c2c9..0000000 --- a/plugin/qgistim/core/dummy_ugrid.py +++ /dev/null @@ -1,41 +0,0 @@ -""" -This module contains a workaround for the following issue: -https://github.com/qgis/QGIS/issues/44474 - -QGIS does not release the file properly. A mesh layer with new results will not -be visualized properly. This can be worked around by forcing QGIS to load -another (dummy) file with UGRID mesh data. 
This "passes on" the file lock to -the newly loaded dummy file, and frees the older one. - -This requires always carrying around a dummy netCDF file, or generating it -with Python, then writing it. A netCDF is mostly easily generated using -xarray, at the cost of xarray and netCDF4 as dependencies. - -Instead, this module contains the binary representation of a netCDF4 file -that has been generated with the following lines of code: - -```python -import base64 -import numpy as np -import xarray as xr - -ds = xr.Dataset() -ds["dummy"] = xr.DataArray( - [[np.nan]], {"y": [0.], "x": [0.]}, ("y", "x") -) -ds.to_netcdf("dummy.nc") -with open("dummy.nc", "rb"): - binary_content = f.read() -b64_encoded = base64.b64encode(binary_content) -``` - -""" -import base64 -from pathlib import Path - -b64_encoded = "iUhERg0KGgoAAAAAAAgIAAQAEAAAAAAAAAAAAAAAAAD//////////7UkAAAAAAAA//////////8AAAAAAAAAAGAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAT0hEUgIM4AIiAAAAAAADBAAAAAAAAAD///////////////////////////////8KAgABAAAAABAQAAAAAMENAAAAAAAAQAAAAAAAAAAVHAAEAAAAAwEA////////////////////////////////DEoAAAAAAQAOAAgACABfTkNQcm9wZXJ0aWVzAAAAEwAAACIAAAABAAAAAAAAAHZlcnNpb249MixuZXRjZGY9NC43LjQsaGRmNT0xLjEwLjYGFAAAAAABBAIAAAAAAAAAAXnfAgAAAAAAAAAIAAAAAAAAAAAAAAAAnsdezk9IRFICDYgBARgAAAAAAQEBAAAAAAABAAAAAAAAAAEAAAAAAAAAAxQAAQAAESA/AAgAAAAAAEAANAsANP8DAAAQEAAAAAC3DwAAAAAAANwAAAAAAAAABAwAAQAACAAAAAAAAAAAAPh/CBIAAAAAAwFBBAAAAAAAAAgAAAAAAAAAFRwABAAAAAMFAP///////////////////////////////wwwAAAAAAEABgAIAAgAQ0xBU1MAAAATAAAAEAAAAAEAAAAAAAAARElNRU5TSU9OX1NDQUxFAAwiAAABAAEABQAIAAgATkFNRQAAAAATAAAAAgAAAAEAAAAAAAAAeAAMNAAAAgABAA4ADAAIAF9OZXRjZGY0RGltaWQAAAAQCAAABAAAAAAAIAAAAAAAAQAAAAAAAAAAAAAADFAAAAMAAQALABQAGABfRmlsbFZhbHVlAAAAAAAAESA/AAgAAAAAAEAANAsANP8DAAAAAAAAAQEBAAAAAAABAAAAAAAAAAEAAAAAAAAAAAAAAAAA+H8KLddoT0hEUgINMgEBGAAAAAABAQEAAAAAAAEAAAAAAAAAAQAAAAAAAAADFAABAAARID8ACAAAAAAAQAA0CwA0/wMAABAQAAAAAEEMAAAAAAAAdAAAAAAAAAAEDAABAAAIAAAAAAAAAAAA+H8IEgAAAAADAUkEAAAAAAAACAAAAAAAAAAVHAAEAAAAAwUA//////////////////////////
//////DDAAAAAAAQAGAAgACABDTEFTUwAAABMAAAAQAAAAAQAAAAAAAABESU1FTlNJT05fU0NBTEUADCIAAAEAAQAFAAgACABOQU1FAAAAABMAAAACAAAAAQAAAAAAAAB5AAw0AAACAAEADgAMAAgAX05ldGNkZjREaW1pZAAAABAIAAAEAAAAAAAgAAAAAAABAAAAAAAAAAEAAACugT5IZAAAABAIAAAEAAAAAAAgAAAAAAABAAAAAAAAAAEAAAAE/QA+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAPh/AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABPQ0hLEBAAAAAA2w4AAAAAAADcAAAAAAAAAAxQAAADAAEACwAUABgAX0ZpbGxWYWx1ZQAAAAAAABEgPwAIAAAAAABAADQLADT/AwAAAAAAAAEBAQAAAAAAAQAAAAAAAAABAAAAAAAAAAAAAAAAAPh/DFP14U9IRFICDQABASgAAAAAAQIBAAAAAAABAAAAAAAAAAEAAAAAAAAAAQAAAAAAAAABAAAAAAAAAAMUAAEAABEgPwAIAAAAAABAADQLADT/AwAAEBAAAAAAAQ4AAAAAAADaAAAAAAAAAAQMAAEAAAgAAAAAAAAAAAD4fwgSAAAAAAMBUQQAAAAAAAAIAAAAAAAAABUcAAQAAAADAwD///////////////////////////////8MUAAAAAABABQADAAYAF9OZXRjZGY0Q29vcmRpbmF0ZXMAAAAAABAIAAAEAAAAAAAgAAAAAAABAQEAAAAAAAIAAAAAAAAAAgAAAAAAAAABAAAAAAAAACnzvHpPQ0hLBhQAAAAAAQQAAAAAAAAAAAF4SwEAAAAAAAAGGAAAAAABBAMAAAAAAAAABWR1bW15tQwAAAAAAACBIi5kT0NISwUQAAEAAAICAgEIAAAAAAAAAAAA+H8MUAAAAQABAAsAFAAYAF9GaWxsVmFsdWUAAAAAAAARID8ACAAAAAAAQAA0CwA0/wMAAAAAAAABAQEAAAAAAAEAAAAAAAAAAQAAAAAAAAAAAAAAAAD4fwxgAAACAAEADwAQABgARElNRU5TSU9OX0xJU1QAABkAAAAQAAAAFwAAAAgAAAABAQEAAAAAAAIAAAAAAAAAAgAAAAAAAAABAAAAtRQAAAAAAAACAAAAAQAAALUUAAAAAAAAAwAAAFHJK5JPQ0hLBRAAAQAAAgICAQgAAAAAAAAAAAD4fwy4AAAEAAEADwB0ABgAUkVGRVJFTkNFX0xJU1QAABYCAAAQAAAAZGF0YXNldAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAABcAAAAIAAAAZGltZW5zaW9uAAAAAAAAAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEAgAAAQAAAAAACAAAAAAAAEBAQAAAAAAAQAAAAAAAAABAAAAAAAAALUMAAAAAAAAAAAAAAAAAACsBlGrT0NISwUQAAEAAAICAgEIAAAAAAAAAAAA+H8MuAAABAABAA8AdAAYAFJFRkVSRU5DRV9MSVNUAAAWAgAAEAAAAGRhdGFzZXQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAXAAAACAAAAGRpbWVuc2lvbgAAAAAAAAAIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAIAAAEAAAAAAAgAAAAAAABAQEAAAAAAAEAAAAAAAAAAQAAAAAAAAC1DAAAAAAAAAEAAAAAAAAAsUXkDQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAR0NPTAEAAAAAEAAAAAAAAAEAAAAAAAAACAAAAAAAAADfAgAAAAAAAAIAAAAAAAAACAAAAAAAAADfAgAAAAAAAAMAAAAAAAAACAAAAAAAAABLAQAAAAAAAAAAAA
AAAAAAqA8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA==" - - -def write_dummy_ugrid(path: Path) -> None: - with open(path, "wb") as f: - f.write(base64.b64decode(b64_encoded)) diff --git a/plugin/qgistim/widgets/compute_widget.py b/plugin/qgistim/widgets/compute_widget.py index 5d7efcd..9e187f4 100644 --- a/plugin/qgistim/widgets/compute_widget.py +++ b/plugin/qgistim/widgets/compute_widget.py @@ -28,7 +28,6 @@ ) from qgis.gui import QgsMapLayerComboBox from qgistim.core import geopackage, layer_styling -from qgistim.core.dummy_ugrid import write_dummy_ugrid from qgistim.core.elements import ELEMENTS, parse_name from qgistim.core.processing import mesh_contours, set_temporal_properties from 
qgistim.core.task import BaseServerTask @@ -76,9 +75,6 @@ def __init__(self, parent=None): self.start_task = None self.parent = parent - self.dummy_ugrid_path = Path(tempfile.mkdtemp()) / "qgistim-dummy-ugrid.nc" - write_dummy_ugrid(self.dummy_ugrid_path) - self.domain_button = QPushButton("Set to current extent") self.compute_button = QPushButton("Compute") self.compute_button.clicked.connect(self.compute) @@ -476,6 +472,7 @@ def load_vector_result(self, path: Union[Path, str]) -> None: # Set the temporal properties if it's a temporal layer set_temporal_properties(layer) + # Special-case the labelling for observations and discharge. if ( "timml Head Observation:" in layername or "ttim Head Observation" in layername From 33f4a3c54c67ff6bf9b7ec85511f4b64ef49905e Mon Sep 17 00:00:00 2001 From: Huite Bootsma Date: Fri, 27 Oct 2023 15:11:44 +0200 Subject: [PATCH 41/41] imports, small docstring fix --- gistim/__init__.py | 5 ++--- plugin/qgistim/core/elements/__init__.py | 4 +--- 2 files changed, 3 insertions(+), 6 deletions(-) diff --git a/gistim/__init__.py b/gistim/__init__.py index 8647a4c..413d7b5 100644 --- a/gistim/__init__.py +++ b/gistim/__init__.py @@ -1,8 +1,7 @@ import pkg_resources -from gistim.compute import compute_steady, compute_transient -from gistim.data_extraction import as_aquifer_aquitard, layer_statistics -from gistim.ugrid import to_ugrid2d +import gistim.compute +import gistim.data_extraction # version try: diff --git a/plugin/qgistim/core/elements/__init__.py b/plugin/qgistim/core/elements/__init__.py index 730650a..4985967 100644 --- a/plugin/qgistim/core/elements/__init__.py +++ b/plugin/qgistim/core/elements/__init__.py @@ -57,9 +57,7 @@ def parse_name(layername: str) -> Tuple[str, str, str]: * what the user provided name is. 
For example: - parse_name("timml Headwell: drainage") -> ("timml", "Head Well", "drainage") - - This function can also be found in gistim.common + parse_name("timml Headwell:drainage") -> ("timml", "Head Well", "drainage") """ prefix, name = layername.split(":") element_type = re.split("timml |ttim ", prefix)[1]