
Commit 1b20cfe
Merge branch 'master' into dependabot/pip/urllib3-2.2.2
wfaustmann authored Dec 4, 2024
2 parents 424e0b9 + cf6c451
Showing 911 changed files with 243,105 additions and 138,445 deletions.
60 changes: 0 additions & 60 deletions .github/workflows/CI-v1.yml

This file was deleted.

4 changes: 2 additions & 2 deletions .github/workflows/CI.yml
@@ -1,4 +1,4 @@
-name: ci-pydantic-v2
+name: ci

on:
  push:
@@ -45,7 +45,7 @@ jobs:
          poetry config virtualenvs.path ~/.virtualenvs
      - name: Install Dependencies
-        run: poetry install -E server -E pydantic-v2
+        run: poetry install -E server

      - name: Test with pytest
        run: poetry run pytest --cov=geolib --cov-report xml:coverage-reports/coverage-hydrolib-core.xml --junitxml=xunit-reports/xunit-result-hydrolib-core.xml -m "unittest and not workinprogress"
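
The `-m "unittest and not workinprogress"` flag selects tests by marker expression, so CI runs only tests tagged `unittest` that are not also tagged `workinprogress`. A sketch of what such markers look like on the test side (hypothetical test names, assuming the markers are registered in the project's pytest configuration):

    import pytest

    @pytest.mark.unittest
    def test_roundtrip():
        assert 1 + 1 == 2  # selected: matches "unittest and not workinprogress"

    @pytest.mark.workinprogress
    def test_unfinished_feature():
        ...  # deselected by the -m expression above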
1 change: 1 addition & 0 deletions .gitignore
@@ -8,6 +8,7 @@ requests.egg-info/
*.pyc
*.swp
*.egg
+.env/
env/
.venv/
venv/
14 changes: 0 additions & 14 deletions docs/user/install.rst
@@ -42,20 +42,6 @@ This package, unlike GEOLib+, tries to limit the number of

You don't need to install anything manually, as the pip installation should take care of it.

-Combining GEOLib with pydantic v2
----------------------------------
-
-GEOLib uses pydantic for validation of types and some parameters (min/max/defaults). The
-latest version of pydantic (v2) has some breaking changes. When using pydantic v2, some
-extra dependencies are required. To use GEOLib with pydantic v2, you can use the following
-command to automatically install the extra dependencies::
-
-    $ pip install d-geolib[pydantic-v2]
-
-When the extra dependencies are not installed, and pydantic v2 is used, an error will be
-thrown when trying to import GEOLib. The error message will guide you in installing the
-required packages yourself.

Get the Source Code
-------------------

2 changes: 1 addition & 1 deletion docs/user/setup.rst
@@ -125,7 +125,7 @@ At the moment that version number is:

* D-Settlement **23.2**
* D-Foundations **23.1**
-* D-SheetPiling **23.1**
+* D-SheetPiling **24.1**
* D-Stability **2024.01**
* D-Geo Flow **2024.01**

3 changes: 1 addition & 2 deletions geolib/__init__.py
@@ -2,7 +2,6 @@
GEOLib Library
"""

__version__ = "2.3.0"
__version__ = "2.5.0"

from . import utils
from .models import *
24 changes: 0 additions & 24 deletions geolib/_compat.py

This file was deleted.

3 changes: 2 additions & 1 deletion geolib/models/__init__.py
@@ -3,7 +3,8 @@
"""

from .base_model_structure import BaseDataClass, BaseModelStructure # isort:skip
-from .base_model import BaseModel, BaseModelList
+from .base_model import BaseModel
+from .base_model_list import BaseModelList
from .dfoundations import DFoundationsModel
from .dsettlement import DSettlementModel
from .dsheetpiling import DSheetPilingModel
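
Because `BaseModelList` is still re-exported from `geolib.models` (see the added import above), moving it into its own module stays backwards compatible for callers; a two-line sketch under that assumption:

    from geolib.models import BaseModelList                   # via the re-export, as before
    from geolib.models.base_model_list import BaseModelList   # or from its new module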
17 changes: 17 additions & 0 deletions geolib/models/base_data_class.py
@@ -0,0 +1,17 @@
+from pydantic import BaseModel, ConfigDict
+
+from .meta import MetaData
+
+settings = MetaData()
+
+
+class BaseDataClass(BaseModel):
+    """Base class for *all* pydantic classes in GEOLib."""
+
+    model_config = ConfigDict(
+        validate_assignment=True,
+        arbitrary_types_allowed=True,
+        validate_default=True,
+        ser_json_inf_nan='constants',
+        extra=settings.extra_fields,
+    )
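
For context on the new config: `validate_assignment=True` re-runs validation whenever a field is assigned after construction, `validate_default=True` validates default values too, and `ser_json_inf_nan='constants'` serializes `inf`/`nan` as `Infinity`/`NaN` in JSON rather than `null`. A minimal sketch of the assignment behavior (hypothetical `Stage` class, not part of this commit):

    from pydantic import BaseModel, ConfigDict, ValidationError

    class Stage(BaseModel):
        model_config = ConfigDict(validate_assignment=True, validate_default=True)
        safety_factor: float = 1.0

    stage = Stage()
    stage.safety_factor = "1.5"       # coerced to the float 1.5 on assignment
    try:
        stage.safety_factor = "high"  # not coercible: raises at assignment time
    except ValidationError as err:
        print(err)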
153 changes: 18 additions & 135 deletions geolib/models/base_model.py
@@ -5,39 +5,36 @@
"""
import abc
import logging
-import os
-from abc import abstractmethod, abstractproperty
-from pathlib import Path, PosixPath, WindowsPath
-from subprocess import Popen, run
-from types import CoroutineType
+from abc import abstractmethod
+from pathlib import Path
+from subprocess import run
from typing import List, Optional, Type, Union

-import requests
-from pydantic import DirectoryPath, FilePath, HttpUrl, conlist
-
-from geolib._compat import IS_PYDANTIC_V2
-
-if IS_PYDANTIC_V2:
-    from pydantic import ValidationError
-else:
-    from pydantic.error_wrappers import ValidationError
-
+from pydantic import DirectoryPath, FilePath, HttpUrl, SerializeAsAny, ValidationError
from requests.auth import HTTPBasicAuth

from geolib.errors import CalculationError
from geolib.models import BaseDataClass

-from .base_model_structure import BaseModelStructure
-from .meta import MetaData
-from .parsers import BaseParserProvider
+from geolib.models.base_model_structure import BaseModelStructure
+from geolib.models.meta import MetaData
+from geolib.models.parsers import BaseParserProvider

logger = logging.getLogger(__name__)
meta = MetaData()


class BaseModel(BaseDataClass, abc.ABC):
    filename: Optional[Path] = None
-    datastructure: Optional[BaseModelStructure] = None
+    datastructure: Optional[SerializeAsAny[BaseModelStructure]] = None
+    """
+    This is the base class for all models in GEOLib.
+    Note that `datastructure` is a `SerializeAsAny` type, which means that
+    the inheriting class is serialized according to its own definition (duck-typing).
+    This is needed because pydantic v2 changed the default serialization behavior:
+    https://docs.pydantic.dev/latest/concepts/serialization/#subclass-instances-for-fields-of-basemodel-dataclasses-typeddict
+    """

    def execute(self, timeout_in_seconds: int = meta.timeout) -> "BaseModel":
        """Execute a Model and wait for `timeout` seconds.
@@ -153,7 +150,8 @@ def console_flags(self) -> List[str]:
    def console_flags_post(self) -> List[str]:
        return []

-    @abstractproperty
+    @property
+    @abstractmethod
    def parser_provider_type(self) -> Type[BaseParserProvider]:
        """Returns the parser provider type of the current concrete class.
@@ -209,121 +207,6 @@ def set_meta_property(self, key: str, value: str) -> None:
        raise ValueError(f"Metadata property {key} does not exist.")


-class BaseModelList(BaseDataClass):
-    """Hold multiple models that can be executed in parallel.
-    Note that all models need to have a unique filename,
-    otherwise they will overwrite each other. This also helps with
-    identifying them later."""
-
-    models: List[BaseModel]
-    errors: List[str] = []
-
-    def execute(
-        self,
-        calculation_folder: DirectoryPath,
-        timeout_in_seconds: int = meta.timeout,
-        nprocesses: Optional[int] = os.cpu_count(),
-    ) -> "BaseModelList":
-        """Execute all models in this class in parallel.
-        We split the list to separate folders and call a batch process on each folder.
-        Note that the order of models will change.
-        """
-
-        # manual check as remote execution could result in zero models
-        if len(self.models) == 0:
-            raise ValueError("Can't execute with zero models.")
-
-        lead_model = self.models[0]
-        processes = []
-        output_models = []
-        errors = []
-
-        # Divide the models over n processes and make sure to copy them to prevent aliasing
-        split_models = [self.models[i::nprocesses] for i in range(nprocesses)]
-        for i, models in enumerate(split_models):
-            if len(models) == 0:
-                continue
-            unique_folder = calculation_folder / str(i)
-            unique_folder.mkdir(parents=True, exist_ok=True)
-
-            for model in models:
-                fn = unique_folder / model.filename.name
-                model.serialize(fn.resolve())
-
-            executable = meta.console_folder / lead_model.default_console_path
-            if not executable.exists():
-                logger.error(
-                    f"Please make sure the `geolib.env` file points to the console folder. GEOLib now can't find it at `{executable}`"
-                )
-                raise CalculationError(
-                    -1, f"Console executable not found at {executable}."
-                )
-
-            process = Popen(
-                [str(executable)] + lead_model.console_flags + [str(i)],
-                cwd=str(calculation_folder.resolve()),
-            )
-            processes.append(process)
-
-        # Wait for all processes to be done
-        for process in processes:
-            logger.debug(f"Executed with {process.args}")
-            process.wait(timeout=timeout_in_seconds)
-
-        # Iterate over the models
-        for i, models in enumerate(split_models):
-            for model in models:
-                model = model.copy(deep=True)  # prevent aliasing
-                output_filename = output_filename_from_input(model)
-                if output_filename.exists():
-                    try:
-                        model.parse(output_filename)
-                        output_models.append(model)
-
-                    except ValidationError:
-                        logger.warning(
-                            f"Output file generated but parsing of {output_filename.name} failed."
-                        )
-                        error = model.get_error_context()
-                        errors.append(error)
-                else:
-                    logger.warning(
-                        f"Model @ {output_filename.name} failed. Please check the .err file and batchlog.txt in its folder."
-                    )
-                    error = model.get_error_context()
-                    errors.append(error)
-
-        return self.__class__(models=output_models, errors=errors)
-
-    def execute_remote(self, endpoint: HttpUrl) -> "BaseModelList":
-        """Execute all models in this class in parallel on a remote endpoint.
-        Note that the order of models will change.
-        """
-        lead_model = self.models[0]
-
-        response = requests.post(
-            requests.compat.urljoin(
-                endpoint, f"calculate/{lead_model.__class__.__name__.lower()}s"
-            ),
-            data="[" + ",".join((model.json() for model in self.models)) + "]",
-            auth=HTTPBasicAuth(meta.gl_username, meta.gl_password),
-        )
-        if response.status_code == 200:
-            models = response.json()["models"]
-            errors = response.json()["errors"]
-            stripped_models = []
-            for model in models:
-                # remove possibly invalid external metadata
-                model.get("meta", {}).pop("console_folder", None)
-                stripped_models.append(lead_model.__class__(**model))
-            return self.__class__(models=stripped_models, errors=errors)
-        else:
-            raise CalculationError(response.status_code, response.text)


def output_filename_from_input(model: BaseModel, extension: str = None) -> Path:
    if not extension:
        extension = model.parser_provider_type().output_parsers[-1].suffix_list[0]
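
The removed `BaseModelList.execute` (now living in `geolib/models/base_model_list.py`, per the `__init__.py` change earlier in this diff) distributes work with strided slices: `self.models[i::nprocesses]` deals the models out round-robin, one folder per process. A standalone sketch of that split:

    def split_round_robin(models: list, nprocesses: int) -> list:
        """Slot i gets models i, i + n, i + 2n, ... — order is not preserved,
        which is why the removed docstring warned that the order of models will change."""
        return [models[i::nprocesses] for i in range(nprocesses)]

    print(split_round_robin(["a", "b", "c", "d", "e"], nprocesses=2))
    # -> [['a', 'c', 'e'], ['b', 'd']]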
