Merge branch 'master' into dependabot/pip/black-24.3.0
wfaustmann authored Dec 2, 2024
2 parents 1ff3c47 + f6d535f commit adbb114
Showing 911 changed files with 243,360 additions and 136,165 deletions.
1 change: 1 addition & 0 deletions .gitignore
@@ -8,6 +8,7 @@ requests.egg-info/
*.pyc
*.swp
*.egg
+.env/
env/
.venv/
venv/
2 changes: 1 addition & 1 deletion docs/user/setup.rst
@@ -125,7 +125,7 @@ At the moment that version number is:

* D-Settlement **23.2**
* D-Foundations **23.1**
-* D-SheetPiling **23.1**
+* D-SheetPiling **24.1**
* D-Stability **2024.01**
* D-Geo Flow **2024.01**

3 changes: 1 addition & 2 deletions geolib/__init__.py
@@ -2,7 +2,6 @@
GEOLib Library
"""

__version__ = "2.3.0"
__version__ = "2.5.0"

from . import utils
from .models import *
3 changes: 2 additions & 1 deletion geolib/models/__init__.py
@@ -3,7 +3,8 @@
"""

from .base_model_structure import BaseDataClass, BaseModelStructure # isort:skip
-from .base_model import BaseModel, BaseModelList
+from .base_model import BaseModel
+from .base_model_list import BaseModelList
from .dfoundations import DFoundationsModel
from .dsettlement import DSettlementModel
from .dsheetpiling import DSheetPilingModel
16 changes: 16 additions & 0 deletions geolib/models/base_data_class.py
@@ -0,0 +1,16 @@
+from pydantic import BaseModel, ConfigDict
+
+from .meta import MetaData
+
+settings = MetaData()
+
+
+class BaseDataClass(BaseModel):
+    """Base class for *all* pydantic classes in GEOLib."""
+
+    model_config = ConfigDict(
+        validate_assignment=True,
+        arbitrary_types_allowed=True,
+        validate_default=True,
+        extra=settings.extra_fields,
+    )
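For readers new to pydantic v2, here is a minimal sketch (not part of this commit) of what the `validate_assignment=True` setting in the new `BaseDataClass` buys; the `Sketch` class is hypothetical:

# Illustrative sketch only: with validate_assignment=True in model_config,
# pydantic v2 re-validates a field every time it is assigned.
from pydantic import BaseModel, ConfigDict, ValidationError

class Sketch(BaseModel):
    model_config = ConfigDict(validate_assignment=True)

    timeout: int = 60

s = Sketch()
s.timeout = 120  # validated and accepted
try:
    s.timeout = "not a number"  # fails int validation at assignment time
except ValidationError as exc:
    print(exc.errors()[0]["type"])  # "int_parsing"

Without `validate_assignment`, the second assignment would silently store the string.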
155 changes: 23 additions & 132 deletions geolib/models/base_model.py
@@ -5,32 +5,36 @@
"""
import abc
import logging
-import os
-from abc import abstractmethod, abstractproperty
-from pathlib import Path, PosixPath, WindowsPath
-from subprocess import Popen, run
-from types import CoroutineType
+from abc import abstractmethod
+from pathlib import Path
+from subprocess import run
from typing import List, Optional, Type, Union

import requests
-from pydantic import DirectoryPath, FilePath, HttpUrl, conlist
-from pydantic.error_wrappers import ValidationError
+from pydantic import DirectoryPath, FilePath, HttpUrl, SerializeAsAny, ValidationError
from requests.auth import HTTPBasicAuth

from geolib.errors import CalculationError
from geolib.models import BaseDataClass

-from .base_model_structure import BaseModelStructure
-from .meta import MetaData
-from .parsers import BaseParserProvider
+from geolib.models.base_model_structure import BaseModelStructure
+from geolib.models.meta import MetaData
+from geolib.models.parsers import BaseParserProvider

logger = logging.getLogger(__name__)
meta = MetaData()


class BaseModel(BaseDataClass, abc.ABC):
-    filename: Optional[Path]
-    datastructure: Optional[BaseModelStructure]
+    filename: Optional[Path] = None
+    datastructure: Optional[SerializeAsAny[BaseModelStructure]] = None
+    """
+    This is the base class for all models in GEOLib.
+    Note that `datastructure` is a `SerializeAsAny` type, which means that
+    the inheriting class is serialized according to its own definition (duck-typing).
+    This is needed since Pydantic v2 as the default behavior has changed:
+    https://docs.pydantic.dev/latest/concepts/serialization/#subclass-instances-for-fields-of-basemodel-dataclasses-typeddict
+    """

    def execute(self, timeout_in_seconds: int = meta.timeout) -> "BaseModel":
        """Execute a Model and wait for `timeout` seconds.
@@ -85,7 +89,7 @@ def execute(self, timeout_in_seconds: int = meta.timeout) -> "BaseModel":
        else:
            error = self.get_error_context()
            raise CalculationError(
-                process.returncode, error + " Path: " + str(output_filename.absolute)
+                process.returncode, error + " Path: " + str(output_filename.absolute())
            )

    def execute_remote(self, endpoint: HttpUrl) -> "BaseModel":
@@ -133,7 +137,7 @@ def serialize(
    @property
    def default_console_path(self) -> Path:
        raise NotImplementedError("Implement in concrete classes.")

    @property
    def custom_console_path(self) -> Optional[Path]:
        return None
@@ -146,7 +150,8 @@ def console_flags(self) -> List[str]:
    def console_flags_post(self) -> List[str]:
        return []

-    @abstractproperty
+    @property
+    @abstractmethod
    def parser_provider_type(self) -> Type[BaseParserProvider]:
        """Returns the parser provider type of the current concrete class.
@@ -186,135 +191,21 @@ def output(self):
        Requires a successful execute.
        """
        return self.datastructure.results

+    def get_meta_property(self, key: str) -> Optional[str]:
+        """Get a metadata property from the input file."""
+        if hasattr(meta, key):
+            return meta.__getattribute__(key)
+        else:
+            return None
+
+    def set_meta_property(self, key: str, value: str) -> None:
+        """Set a metadata property from the input file."""
+        if hasattr(meta, key):
+            meta.__setattr__(key, value)
+        else:
+            raise ValueError(f"Metadata property {key} does not exist.")

-class BaseModelList(BaseDataClass):
-    """Hold multiple models that can be executed in parallel.
-    Note that all models need to have a unique filename
-    otherwise they will overwrite eachother. This also helps with
-    identifying them later."""
-
-    models: List[BaseModel]
-    errors: List[str] = []
-
-    def execute(
-        self,
-        calculation_folder: DirectoryPath,
-        timeout_in_seconds: int = meta.timeout,
-        nprocesses: Optional[int] = os.cpu_count(),
-    ) -> "BaseModelList":
-        """Execute all models in this class in parallel.
-        We split the list to separate folders and call a batch processes on each folder.
-        Note that the order of models will change.
-        """
-
-        # manual check as remote execution could result in zero models
-        if len(self.models) == 0:
-            raise ValueError("Can't execute with zero models.")
-
-        lead_model = self.models[0]
-        processes = []
-        output_models = []
-        errors = []
-
-        # Divide the models over n processes and make sure to copy them to prevent aliasing
-        split_models = [self.models[i::nprocesses] for i in range(nprocesses)]
-        for i, models in enumerate(split_models):
-            if len(models) == 0:
-                continue
-            unique_folder = calculation_folder / str(i)
-            unique_folder.mkdir(parents=True, exist_ok=True)
-
-            for model in models:
-                fn = unique_folder / model.filename.name
-                model.serialize(fn.resolve())
-
-            executable = meta.console_folder / lead_model.default_console_path
-            if not executable.exists():
-                logger.error(
-                    f"Please make sure the `geolib.env` file points to the console folder. GEOLib now can't find it at `{executable}`"
-                )
-                raise CalculationError(
-                    -1, f"Console executable not found at {executable}."
-                )
-
-            process = Popen(
-                [str(executable)] + lead_model.console_flags + [str(i)],
-                cwd=str(calculation_folder.resolve()),
-            )
-            processes.append(process)
-
-        # Wait for all processes to be done
-        for process in processes:
-            logger.debug(f"Executed with {process.args}")
-            process.wait(timeout=timeout_in_seconds)
-
-        # Iterate over the models
-        for i, models in enumerate(split_models):
-            for model in models:
-                model = model.copy(deep=True)  # prevent aliasing
-                output_filename = output_filename_from_input(model)
-                if output_filename.exists():
-                    try:
-                        model.parse(output_filename)
-                        output_models.append(model)
-
-                    except ValidationError:
-                        logger.warning(
-                            f"Ouput file generated but parsing of {output_filename.name} failed."
-                        )
-                        error = model.get_error_context()
-                        errors.append(error)
-                else:
-                    logger.warning(
-                        f"Model @ {output_filename.name} failed. Please check the .err file and batchlog.txt in its folder."
-                    )
-                    error = model.get_error_context()
-                    errors.append(error)
-
-        return self.__class__(models=output_models, errors=errors)
-
-    def execute_remote(self, endpoint: HttpUrl) -> "BaseModelList":
-        """Execute all models in this class in parallel on a remote endpoint.
-        Note that the order of models will change.
-        """
-        lead_model = self.models[0]
-
-        response = requests.post(
-            requests.compat.urljoin(
-                endpoint, f"calculate/{lead_model.__class__.__name__.lower()}s"
-            ),
-            data="[" + ",".join((model.json() for model in self.models)) + "]",
-            auth=HTTPBasicAuth(meta.gl_username, meta.gl_password),
-        )
-        if response.status_code == 200:
-            models = response.json()["models"]
-            errors = response.json()["errors"]
-            stripped_models = []
-            for model in models:
-                # remove possibly invalid external metadata
-                model.get("meta", {}).pop("console_folder", None)
-                stripped_models.append(lead_model.__class__(**model))
-            return self.__class__(models=stripped_models, errors=errors)
-        else:
-            raise CalculationError(response.status_code, response.text)


def output_filename_from_input(model: BaseModel, extension: str = None) -> Path:
    if not extension:
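On the new `datastructure: Optional[SerializeAsAny[BaseModelStructure]]` annotation above: a minimal sketch, with hypothetical class names, of the pydantic v2 default it works around. Without `SerializeAsAny`, a subclass instance assigned to a base-typed field is serialized using only the base class's fields:

# Illustrative sketch only; class names are made up for the example.
from pydantic import BaseModel, SerializeAsAny

class Structure(BaseModel):
    version: int = 1

class RichStructure(Structure):
    results: str = "done"

class Plain(BaseModel):
    datastructure: Structure

class DuckTyped(BaseModel):
    datastructure: SerializeAsAny[Structure]

sub = RichStructure()
print(Plain(datastructure=sub).model_dump())      # {'datastructure': {'version': 1}}
print(DuckTyped(datastructure=sub).model_dump())  # {'datastructure': {'version': 1, 'results': 'done'}}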
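The new `get_meta_property`/`set_meta_property` helpers read and write attributes on the module-level `MetaData` instance, not on the model itself. A short usage sketch, assuming a concrete `DSheetPilingModel` and the `timeout` attribute that the `meta.timeout` default above implies exists:

# Usage sketch; any concrete BaseModel subclass works the same way.
model = DSheetPilingModel()
model.set_meta_property("timeout", "600")      # updates the shared MetaData object
print(model.get_meta_property("timeout"))      # -> "600"
print(model.get_meta_property("missing_key"))  # -> None (reads never raise)
model.set_meta_property("missing_key", "x")    # raises ValueError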
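Finally, the `self.models[i::nprocesses]` slicing in the relocated `BaseModelList.execute` deals the models out round-robin, one disjoint batch per worker folder, which is also why the docstring warns that the order of models will change. A standalone sketch with toy values:

# Round-robin split as used by BaseModelList.execute.
models = ["a", "b", "c", "d", "e"]
nprocesses = 2
split = [models[i::nprocesses] for i in range(nprocesses)]
print(split)  # [['a', 'c', 'e'], ['b', 'd']] -- every model lands in exactly one batch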
