diff --git a/ci/helpers/requirements.txt b/ci/helpers/requirements.txt index 55a3ab4e163..daf26ed5c0b 100644 --- a/ci/helpers/requirements.txt +++ b/ci/helpers/requirements.txt @@ -1,17 +1,19 @@ # This file was autogenerated by uv via the following command: -# uv pip compile requirements.in +# uv pip compile requirements.in -o requirements.txt aiohttp==3.9.5 + # via + # -c ../../requirements/constraints.txt + # -r requirements.in aiosignal==1.3.1 # via aiohttp +annotated-types==0.7.0 + # via pydantic anyio==4.3.0 # via starlette -async-timeout==4.0.3 - # via aiohttp attrs==23.2.0 # via aiohttp -exceptiongroup==1.2.1 - # via anyio -fastapi==0.99.1 +fastapi==0.115.0 + # via -r requirements.in frozenlist==1.4.1 # via # aiohttp @@ -24,16 +26,22 @@ multidict==6.0.5 # via # aiohttp # yarl -pydantic==1.10.15 - # via fastapi +pydantic==2.9.2 + # via + # -c ../../requirements/constraints.txt + # fastapi +pydantic-core==2.23.4 + # via pydantic sniffio==1.3.1 # via anyio -starlette==0.27.0 - # via fastapi +starlette==0.38.6 + # via + # -c ../../requirements/constraints.txt + # fastapi typing-extensions==4.11.0 # via - # anyio # fastapi # pydantic + # pydantic-core yarl==1.9.4 # via aiohttp diff --git a/packages/aws-library/requirements/_base.txt b/packages/aws-library/requirements/_base.txt index 50f6f157311..53832fe75c5 100644 --- a/packages/aws-library/requirements/_base.txt +++ b/packages/aws-library/requirements/_base.txt @@ -32,6 +32,8 @@ aiormq==6.8.0 # via aio-pika aiosignal==1.3.1 # via aiohttp +annotated-types==0.7.0 + # via pydantic anyio==4.4.0 # via # fast-depends @@ -105,7 +107,7 @@ orjson==3.10.7 # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in pamqp==3.3.0 # via aiormq -pydantic==1.10.17 +pydantic==2.9.1 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -120,6 +122,20 @@ pydantic==1.10.17 # -r requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/_base.in # fast-depends + # pydantic-extra-types + # pydantic-settings +pydantic-core==2.23.3 + # via pydantic +pydantic-extra-types==2.9.0 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in +pydantic-settings==2.5.2 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in pygments==2.18.0 # via rich pyinstrument==4.7.2 @@ -128,6 +144,8 @@ python-dateutil==2.9.0.post0 # via # arrow # botocore +python-dotenv==1.0.1 + # via pydantic-settings pyyaml==6.0.2 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -198,6 +216,7 @@ typing-extensions==4.12.2 # aiodebug # faststream # pydantic + # pydantic-core # typer # types-aiobotocore # types-aiobotocore-ec2 diff --git a/packages/aws-library/requirements/_test.txt b/packages/aws-library/requirements/_test.txt index 213e7ccb561..1a9b4a7afa2 
100644 --- a/packages/aws-library/requirements/_test.txt +++ b/packages/aws-library/requirements/_test.txt @@ -1,3 +1,7 @@ +annotated-types==0.7.0 + # via + # -c requirements/_base.txt + # pydantic antlr4-python3-runtime==4.13.2 # via moto appdirs==1.4.4 @@ -151,11 +155,15 @@ py-partiql-parser==0.5.5 # via moto pycparser==2.22 # via cffi -pydantic==1.10.17 +pydantic==2.9.1 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # aws-sam-translator +pydantic-core==2.23.3 + # via + # -c requirements/_base.txt + # pydantic pyparsing==3.1.2 # via moto pytest==8.3.2 @@ -193,7 +201,9 @@ python-dateutil==2.9.0.post0 # faker # moto python-dotenv==1.0.1 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in pyyaml==6.0.2 # via # -c requirements/../../../requirements/constraints.txt @@ -268,6 +278,7 @@ typing-extensions==4.12.2 # flexparser # pint # pydantic + # pydantic-core # types-aioboto3 # types-aiobotocore urllib3==2.2.2 diff --git a/packages/aws-library/requirements/ci.txt b/packages/aws-library/requirements/ci.txt index 7fcd69a010f..f39ca4e3672 100644 --- a/packages/aws-library/requirements/ci.txt +++ b/packages/aws-library/requirements/ci.txt @@ -11,8 +11,9 @@ --requirement _test.txt # installs this repo's packages +simcore-common-library @ ../common-library +simcore-models-library @ ../models-library/ pytest-simcore @ ../pytest-simcore -simcore-models-library @ ../models-library simcore-service-library @ ../service-library/ simcore-settings-library @ ../settings-library/ diff --git a/packages/aws-library/requirements/dev.txt b/packages/aws-library/requirements/dev.txt index f8956725407..34cc644b370 100644 --- a/packages/aws-library/requirements/dev.txt +++ b/packages/aws-library/requirements/dev.txt @@ -12,8 +12,9 @@ --requirement _tools.txt # installs this repo's packages ---editable ../pytest-simcore +--editable ../common-library/ --editable ../models-library/ +--editable ../pytest-simcore/ --editable ../service-library/ --editable ../settings-library/ diff --git a/packages/aws-library/src/aws_library/ec2/__init__.py b/packages/aws-library/src/aws_library/ec2/__init__.py index 02fcf10b00e..112c70861b2 100644 --- a/packages/aws-library/src/aws_library/ec2/__init__.py +++ b/packages/aws-library/src/aws_library/ec2/__init__.py @@ -1,6 +1,10 @@ from ._client import SimcoreEC2API from ._errors import EC2AccessError, EC2NotConnectedError, EC2RuntimeError from ._models import ( + AWS_TAG_KEY_MAX_LENGTH, + AWS_TAG_KEY_MIN_LENGTH, + AWS_TAG_VALUE_MAX_LENGTH, + AWS_TAG_VALUE_MIN_LENGTH, AWSTagKey, AWSTagValue, EC2InstanceBootSpecific, @@ -14,6 +18,10 @@ __all__: tuple[str, ...] 
= ( "AWSTagKey", "AWSTagValue", + "AWS_TAG_KEY_MIN_LENGTH", + "AWS_TAG_KEY_MAX_LENGTH", + "AWS_TAG_VALUE_MIN_LENGTH", + "AWS_TAG_VALUE_MAX_LENGTH", "EC2AccessError", "EC2InstanceBootSpecific", "EC2InstanceConfig", diff --git a/packages/aws-library/src/aws_library/ec2/_errors.py b/packages/aws-library/src/aws_library/ec2/_errors.py index c39047db00d..4fb0e611ed2 100644 --- a/packages/aws-library/src/aws_library/ec2/_errors.py +++ b/packages/aws-library/src/aws_library/ec2/_errors.py @@ -1,12 +1,9 @@ # pylint: disable=too-many-ancestors -from typing import Any - -from models_library.errors_classes import OsparcErrorMixin +from common_library.errors_classes import OsparcErrorMixin class EC2BaseError(OsparcErrorMixin, Exception): - def __init__(self, **ctx: Any) -> None: - super().__init__(**ctx) + pass class EC2RuntimeError(EC2BaseError, RuntimeError): diff --git a/packages/aws-library/src/aws_library/ec2/_models.py b/packages/aws-library/src/aws_library/ec2/_models.py index fed1f3ea46a..ad15a74f0eb 100644 --- a/packages/aws-library/src/aws_library/ec2/_models.py +++ b/packages/aws-library/src/aws_library/ec2/_models.py @@ -2,18 +2,19 @@ import re import tempfile from dataclasses import dataclass -from typing import Any, ClassVar, TypeAlias +from typing import Annotated, Final, TypeAlias import sh # type: ignore[import-untyped] from models_library.docker import DockerGenericTag from pydantic import ( BaseModel, ByteSize, - ConstrainedStr, + ConfigDict, Field, NonNegativeFloat, NonNegativeInt, - validator, + StringConstraints, + field_validator, ) from types_aiobotocore_ec2.literals import InstanceStateNameType, InstanceTypeType @@ -33,26 +34,26 @@ def __gt__(self, other: "Resources") -> bool: return self.cpus > other.cpus or self.ram > other.ram def __add__(self, other: "Resources") -> "Resources": - return Resources.construct( + return Resources.model_construct( **{ key: a + b for (key, a), b in zip( - self.dict().items(), other.dict().values(), strict=True + self.model_dump().items(), other.model_dump().values(), strict=True ) } ) def __sub__(self, other: "Resources") -> "Resources": - return Resources.construct( + return Resources.model_construct( **{ key: a - b for (key, a), b in zip( - self.dict().items(), other.dict().values(), strict=True + self.model_dump().items(), other.model_dump().values(), strict=True ) } ) - @validator("cpus", pre=True) + @field_validator("cpus", mode="before") @classmethod def _floor_cpus_to_0(cls, v: float) -> float: return max(v, 0) @@ -67,19 +68,31 @@ class EC2InstanceType: InstancePrivateDNSName: TypeAlias = str -class AWSTagKey(ConstrainedStr): +AWS_TAG_KEY_MIN_LENGTH: Final[int] = 1 +AWS_TAG_KEY_MAX_LENGTH: Final[int] = 128 +AWSTagKey: TypeAlias = Annotated[ # see [https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/Using_Tags.html#tag-restrictions] - regex = re.compile(r"^(?!(_index|\.{1,2})$)[a-zA-Z0-9\+\-=\._:@]+$") - min_length = 1 - max_length = 128 - - -class AWSTagValue(ConstrainedStr): + str, + StringConstraints( + min_length=AWS_TAG_KEY_MIN_LENGTH, + max_length=AWS_TAG_KEY_MAX_LENGTH, + pattern=re.compile(r"^(?!(_index|\.{1,2})$)[a-zA-Z0-9\+\-=\._:@]+$"), + ), +] + + +AWS_TAG_VALUE_MIN_LENGTH: Final[int] = 0 +AWS_TAG_VALUE_MAX_LENGTH: Final[int] = 256 +AWSTagValue: TypeAlias = Annotated[ # see [https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/Using_Tags.html#tag-restrictions] # quotes []{} were added as it allows to json encode. 
it seems to be accepted as a value
-    regex = re.compile(r"^[a-zA-Z0-9\s\+\-=\.,_:/@\"\'\[\]\{\}]*$")
-    min_length = 0
-    max_length = 256
+    str,
+    StringConstraints(
+        min_length=AWS_TAG_VALUE_MIN_LENGTH,
+        max_length=AWS_TAG_VALUE_MAX_LENGTH,
+        pattern=re.compile(r"^[a-zA-Z0-9\s\+\-=\.,_:/@\"\'\[\]\{\}]*$"),
+    ),
+]


 EC2Tags: TypeAlias = dict[AWSTagKey, AWSTagValue]
@@ -148,8 +161,23 @@ class EC2InstanceBootSpecific(BaseModel):
         default=0, description="number of buffer EC2s to keep (defaults to 0)"
     )

-    class Config:
-        schema_extra: ClassVar[dict[str, Any]] = {
+    @field_validator("custom_boot_scripts")
+    @classmethod
+    def validate_bash_calls(cls, v):
+        try:
+            with tempfile.NamedTemporaryFile(mode="wt", delete=True) as temp_file:
+                temp_file.writelines(v)
+                temp_file.flush()
+                # NOTE: this will not capture runtime errors, but at least some syntax errors such as invalid quotes
+                sh.bash("-n", temp_file.name)
+        except sh.ErrorReturnCode as exc:
+            msg = f"Invalid bash call in custom_boot_scripts: {v}, Error: {exc.stderr}"
+            raise ValueError(msg) from exc
+
+        return v
+
+    model_config = ConfigDict(
+        json_schema_extra={
             "examples": [
                 {
                     # just AMI
                     "ami_id": "ami-123456789abcdef",
                 },
@@ -205,18 +233,4 @@ class Config:
                 },
             ]
         }
-
-    @validator("custom_boot_scripts")
-    @classmethod
-    def validate_bash_calls(cls, v):
-        try:
-            with tempfile.NamedTemporaryFile(mode="wt", delete=True) as temp_file:
-                temp_file.writelines(v)
-                temp_file.flush()
-                # NOTE: this will not capture runtime errors, but at least some syntax errors such as invalid quotes
-                sh.bash("-n", temp_file.name)
-        except sh.ErrorReturnCode as exc:
-            msg = f"Invalid bash call in custom_boot_scripts: {v}, Error: {exc.stderr}"
-            raise ValueError(msg) from exc
-
-        return v
+    )
diff --git a/packages/aws-library/src/aws_library/s3/_client.py b/packages/aws-library/src/aws_library/s3/_client.py
index 4ddb2bfb9c2..816208c22c8 100644
--- a/packages/aws-library/src/aws_library/s3/_client.py
+++ b/packages/aws-library/src/aws_library/s3/_client.py
@@ -13,9 +13,10 @@
 from boto3.s3.transfer import TransferConfig
 from botocore import exceptions as botocore_exc
 from botocore.client import Config
+from common_library.pydantic_type_adapters import AnyUrlLegacyAdapter
 from models_library.api_schemas_storage import ETag, S3BucketName, UploadedPart
 from models_library.basic_types import SHA256Str
-from pydantic import AnyUrl, ByteSize, parse_obj_as
+from pydantic import AnyUrl, ByteSize, TypeAdapter
 from servicelib.logging_utils import log_catch, log_context
 from servicelib.utils import limited_gather
 from settings_library.s3 import S3Settings
@@ -44,6 +45,9 @@
 _AWS_MAX_ITEMS_PER_PAGE: Final[int] = 1000


+ListAnyUrlTypeAdapter: Final[TypeAdapter[list[AnyUrl]]] = TypeAdapter(list[AnyUrl])
+
+
 class UploadedBytesTransferredCallback(Protocol):
     def __call__(self, bytes_transferred: int, *, file_name: str) -> None: ...
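The hunks in this file repeat one migration pattern: Pydantic v1's `parse_obj_as(T, value)` becomes `TypeAdapter(T).validate_python(value)` in v2, and adapters such as `ListAnyUrlTypeAdapter` above are built once at module scope because constructing a `TypeAdapter` compiles a core-schema validator, which is comparatively expensive to redo on every call. A minimal sketch of the pattern (the adapter names below are illustrative, not part of this PR):

```python
from typing import Final

from pydantic import AnyUrl, ByteSize, TypeAdapter

# build once at module scope, then reuse: each TypeAdapter(...) call
# compiles a validator for the annotated type
_BYTE_SIZE_ADAPTER: Final[TypeAdapter[ByteSize]] = TypeAdapter(ByteSize)
_URL_LIST_ADAPTER: Final[TypeAdapter[list[AnyUrl]]] = TypeAdapter(list[AnyUrl])

# Pydantic v1: parse_obj_as(ByteSize, "100MiB")
chunk_size: ByteSize = _BYTE_SIZE_ADAPTER.validate_python("100MiB")

# Pydantic v1: parse_obj_as(list[AnyUrl], [...])
urls: list[AnyUrl] = _URL_LIST_ADAPTER.validate_python(["s3://bucket/object-key"])
```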
@@ -251,7 +255,7 @@ async def create_single_presigned_download_link( bucket: S3BucketName, object_key: S3ObjectKey, expiration_secs: int, - ) -> AnyUrl: + ) -> str: # NOTE: ensure the bucket/object exists, this will raise if not await self._client.head_bucket(Bucket=bucket) await self._client.head_object(Bucket=bucket, Key=object_key) @@ -260,13 +264,12 @@ async def create_single_presigned_download_link( Params={"Bucket": bucket, "Key": object_key}, ExpiresIn=expiration_secs, ) - url: AnyUrl = parse_obj_as(AnyUrl, generated_link) - return url + return f"{AnyUrlLegacyAdapter.validate_python(generated_link)}" @s3_exception_handler(_logger) async def create_single_presigned_upload_link( self, *, bucket: S3BucketName, object_key: S3ObjectKey, expiration_secs: int - ) -> AnyUrl: + ) -> str: # NOTE: ensure the bucket/object exists, this will raise if not await self._client.head_bucket(Bucket=bucket) generated_link = await self._client.generate_presigned_url( @@ -274,8 +277,7 @@ async def create_single_presigned_upload_link( Params={"Bucket": bucket, "Key": object_key}, ExpiresIn=expiration_secs, ) - url: AnyUrl = parse_obj_as(AnyUrl, generated_link) - return url + return f"{AnyUrlLegacyAdapter.validate_python(generated_link)}" @s3_exception_handler(_logger) async def create_multipart_upload_links( @@ -298,8 +300,7 @@ async def create_multipart_upload_links( # compute the number of links, based on the announced file size num_upload_links, chunk_size = compute_num_file_chunks(file_size) # now create the links - upload_links = parse_obj_as( - list[AnyUrl], + upload_links = ListAnyUrlTypeAdapter.validate_python( await asyncio.gather( *( self._client.generate_presigned_url( @@ -473,7 +474,6 @@ def is_multipart(file_size: ByteSize) -> bool: @staticmethod def compute_s3_url(*, bucket: S3BucketName, object_key: S3ObjectKey) -> AnyUrl: - url: AnyUrl = parse_obj_as( - AnyUrl, f"s3://{bucket}/{urllib.parse.quote(object_key)}" + return AnyUrlLegacyAdapter.validate_python( + f"s3://{bucket}/{urllib.parse.quote(object_key)}" ) - return url diff --git a/packages/aws-library/src/aws_library/s3/_constants.py b/packages/aws-library/src/aws_library/s3/_constants.py index 05f2b3dc6d6..a94cd555f43 100644 --- a/packages/aws-library/src/aws_library/s3/_constants.py +++ b/packages/aws-library/src/aws_library/s3/_constants.py @@ -1,10 +1,14 @@ from typing import Final -from pydantic import ByteSize, parse_obj_as +from pydantic import ByteSize, TypeAdapter # NOTE: AWS S3 upload limits https://docs.aws.amazon.com/AmazonS3/latest/userguide/qfacts.html -MULTIPART_UPLOADS_MIN_TOTAL_SIZE: Final[ByteSize] = parse_obj_as(ByteSize, "100MiB") -MULTIPART_COPY_THRESHOLD: Final[ByteSize] = parse_obj_as(ByteSize, "100MiB") +MULTIPART_UPLOADS_MIN_TOTAL_SIZE: Final[ByteSize] = TypeAdapter( + ByteSize +).validate_python("100MiB") +MULTIPART_COPY_THRESHOLD: Final[ByteSize] = TypeAdapter(ByteSize).validate_python( + "100MiB" +) -PRESIGNED_LINK_MAX_SIZE: Final[ByteSize] = parse_obj_as(ByteSize, "5GiB") -S3_MAX_FILE_SIZE: Final[ByteSize] = parse_obj_as(ByteSize, "5TiB") +PRESIGNED_LINK_MAX_SIZE: Final[ByteSize] = TypeAdapter(ByteSize).validate_python("5GiB") +S3_MAX_FILE_SIZE: Final[ByteSize] = TypeAdapter(ByteSize).validate_python("5TiB") diff --git a/packages/aws-library/src/aws_library/s3/_errors.py b/packages/aws-library/src/aws_library/s3/_errors.py index f297b04b64d..d14105dbd30 100644 --- a/packages/aws-library/src/aws_library/s3/_errors.py +++ b/packages/aws-library/src/aws_library/s3/_errors.py @@ -1,7 +1,7 @@ -from pydantic.errors 
import PydanticErrorMixin +from common_library.errors_classes import OsparcErrorMixin -class S3RuntimeError(PydanticErrorMixin, RuntimeError): +class S3RuntimeError(OsparcErrorMixin, RuntimeError): msg_template: str = "S3 client unexpected error" @@ -10,25 +10,25 @@ class S3NotConnectedError(S3RuntimeError): class S3AccessError(S3RuntimeError): - code = "s3_access.error" + code = "s3_access.error" # type: ignore[assignment] msg_template: str = "Unexpected error while accessing S3 backend" class S3BucketInvalidError(S3AccessError): - code = "s3_bucket.invalid_error" + code = "s3_bucket.invalid_error" # type: ignore[assignment] msg_template: str = "The bucket '{bucket}' is invalid" class S3KeyNotFoundError(S3AccessError): - code = "s3_key.not_found_error" + code = "s3_key.not_found_error" # type: ignore[assignment] msg_template: str = "The file {key} in {bucket} was not found" class S3UploadNotFoundError(S3AccessError): - code = "s3_upload.not_found_error" + code = "s3_upload.not_found_error" # type: ignore[assignment] msg_template: str = "The upload for {key} in {bucket} was not found" class S3DestinationNotEmptyError(S3AccessError): - code = "s3_destination.not_empty_error" + code = "s3_destination.not_empty_error" # type: ignore[assignment] msg_template: str = "The destination {dst_prefix} is not empty" diff --git a/packages/aws-library/src/aws_library/s3/_utils.py b/packages/aws-library/src/aws_library/s3/_utils.py index 00a1bcc59bb..96ad59f57d3 100644 --- a/packages/aws-library/src/aws_library/s3/_utils.py +++ b/packages/aws-library/src/aws_library/s3/_utils.py @@ -1,13 +1,13 @@ from typing import Final -from pydantic import ByteSize, parse_obj_as +from pydantic import ByteSize, TypeAdapter _MULTIPART_MAX_NUMBER_OF_PARTS: Final[int] = 10000 # this is artifically defined, if possible we keep a maximum number of requests for parallel # uploading. 
If that is not possible then we create as many upload part as the max part size allows _MULTIPART_UPLOADS_TARGET_MAX_PART_SIZE: Final[list[ByteSize]] = [ - parse_obj_as(ByteSize, x) + TypeAdapter(ByteSize).validate_python(x) for x in [ "10Mib", "50Mib", diff --git a/packages/aws-library/src/aws_library/ssm/_errors.py b/packages/aws-library/src/aws_library/ssm/_errors.py index 32300d08d29..5d3ea16b6c6 100644 --- a/packages/aws-library/src/aws_library/ssm/_errors.py +++ b/packages/aws-library/src/aws_library/ssm/_errors.py @@ -1,7 +1,7 @@ -from pydantic.errors import PydanticErrorMixin +from common_library.errors_classes import OsparcErrorMixin -class SSMRuntimeError(PydanticErrorMixin, RuntimeError): +class SSMRuntimeError(OsparcErrorMixin, RuntimeError): msg_template: str = "SSM client unexpected error" diff --git a/packages/aws-library/tests/test_ec2_models.py b/packages/aws-library/tests/test_ec2_models.py index f7c114932be..ed232ad0043 100644 --- a/packages/aws-library/tests/test_ec2_models.py +++ b/packages/aws-library/tests/test_ec2_models.py @@ -6,7 +6,7 @@ import pytest from aws_library.ec2._models import AWSTagKey, AWSTagValue, EC2InstanceData, Resources from faker import Faker -from pydantic import ByteSize, ValidationError, parse_obj_as +from pydantic import ByteSize, TypeAdapter, ValidationError @pytest.mark.parametrize( @@ -88,9 +88,9 @@ def test_resources_gt_operator(a: Resources, b: Resources, a_greater_than_b: boo Resources(cpus=1, ram=ByteSize(34)), ), ( - Resources(cpus=0.1, ram=ByteSize(-1)), + Resources(cpus=0.1, ram=ByteSize(1)), Resources(cpus=1, ram=ByteSize(34)), - Resources(cpus=1.1, ram=ByteSize(33)), + Resources(cpus=1.1, ram=ByteSize(35)), ), ], ) @@ -108,14 +108,14 @@ def test_resources_create_as_empty(): "a,b,result", [ ( - Resources(cpus=0, ram=ByteSize(0)), - Resources(cpus=1, ram=ByteSize(34)), - Resources.construct(cpus=-1, ram=ByteSize(-34)), + Resources(cpus=0, ram=ByteSize(34)), + Resources(cpus=1, ram=ByteSize(0)), + Resources.model_construct(cpus=-1, ram=ByteSize(34)), ), ( - Resources(cpus=0.1, ram=ByteSize(-1)), - Resources(cpus=1, ram=ByteSize(34)), - Resources.construct(cpus=-0.9, ram=ByteSize(-35)), + Resources(cpus=0.1, ram=ByteSize(34)), + Resources(cpus=1, ram=ByteSize(1)), + Resources.model_construct(cpus=-0.9, ram=ByteSize(33)), ), ], ) @@ -129,10 +129,10 @@ def test_resources_sub(a: Resources, b: Resources, result: Resources): def test_aws_tag_key_invalid(ec2_tag_key: str): # for a key it raises with pytest.raises(ValidationError): - parse_obj_as(AWSTagKey, ec2_tag_key) + TypeAdapter(AWSTagKey).validate_python(ec2_tag_key) # for a value it does not - parse_obj_as(AWSTagValue, ec2_tag_key) + TypeAdapter(AWSTagValue).validate_python(ec2_tag_key) def test_ec2_instance_data_hashable(faker: Faker): diff --git a/packages/aws-library/tests/test_s3_client.py b/packages/aws-library/tests/test_s3_client.py index 93ee29fe5b0..af98fd6523f 100644 --- a/packages/aws-library/tests/test_s3_client.py +++ b/packages/aws-library/tests/test_s3_client.py @@ -14,7 +14,7 @@ from collections.abc import AsyncIterator, Awaitable, Callable from dataclasses import dataclass from pathlib import Path -from typing import Any +from typing import Any, Final import botocore.exceptions import pytest @@ -32,7 +32,7 @@ from models_library.api_schemas_storage import S3BucketName, UploadedPart from models_library.basic_types import SHA256Str from moto.server import ThreadedMotoServer -from pydantic import AnyUrl, ByteSize, parse_obj_as +from pydantic import AnyUrl, ByteSize, 
TypeAdapter from pytest_benchmark.plugin import BenchmarkFixture from pytest_simcore.helpers.logging_tools import log_context from pytest_simcore.helpers.parametrizations import ( @@ -49,6 +49,8 @@ from types_aiobotocore_s3 import S3Client from types_aiobotocore_s3.literals import BucketLocationConstraintType +_BYTE_SIZE_ADAPTER: Final[TypeAdapter[ByteSize]] = TypeAdapter(ByteSize) + @pytest.fixture async def simcore_s3_api( @@ -67,7 +69,9 @@ async def simcore_s3_api( @pytest.fixture def bucket_name(faker: Faker) -> S3BucketName: # NOTE: no faker here as we need some specific namings - return parse_obj_as(S3BucketName, faker.pystr().replace("_", "-").lower()) + return TypeAdapter(S3BucketName).validate_python( + faker.pystr().replace("_", "-").lower() + ) @pytest.fixture @@ -89,7 +93,9 @@ async def with_s3_bucket( @pytest.fixture def non_existing_s3_bucket(faker: Faker) -> S3BucketName: - return parse_obj_as(S3BucketName, faker.pystr().replace("_", "-").lower()) + return TypeAdapter(S3BucketName).validate_python( + faker.pystr().replace("_", "-").lower() + ) @pytest.fixture @@ -107,7 +113,7 @@ async def _( file, MultiPartUploadLinks( upload_id="fake", - chunk_size=parse_obj_as(ByteSize, file.stat().st_size), + chunk_size=_BYTE_SIZE_ADAPTER.validate_python(file.stat().st_size), urls=[presigned_url], ), ) @@ -131,7 +137,7 @@ async def with_uploaded_file_on_s3( s3_client: S3Client, with_s3_bucket: S3BucketName, ) -> AsyncIterator[UploadedFile]: - test_file = create_file_of_size(parse_obj_as(ByteSize, "10Kib")) + test_file = create_file_of_size(_BYTE_SIZE_ADAPTER.validate_python("10Kib")) await s3_client.upload_file( Filename=f"{test_file}", Bucket=with_s3_bucket, @@ -200,7 +206,7 @@ async def _uploader( object_key=object_key, file_size=ByteSize(file.stat().st_size), expiration_secs=default_expiration_time_seconds, - sha256_checksum=parse_obj_as(SHA256Str, faker.sha256()), + sha256_checksum=TypeAdapter(SHA256Str).validate_python(faker.sha256()), ) assert upload_links @@ -586,7 +592,7 @@ async def test_undelete_file( assert file_metadata.size == with_uploaded_file_on_s3.local_path.stat().st_size # upload another file on top of the existing one - new_file = create_file_of_size(parse_obj_as(ByteSize, "5Kib")) + new_file = create_file_of_size(_BYTE_SIZE_ADAPTER.validate_python("5Kib")) await s3_client.upload_file( Filename=f"{new_file}", Bucket=with_s3_bucket, @@ -688,7 +694,7 @@ async def test_create_single_presigned_download_link( object_key=with_uploaded_file_on_s3.s3_key, expiration_secs=default_expiration_time_seconds, ) - assert isinstance(download_url, AnyUrl) + assert download_url dest_file = tmp_path / faker.file_name() async with ClientSession() as session: @@ -738,10 +744,10 @@ async def test_create_single_presigned_upload_link( create_file_of_size: Callable[[ByteSize], Path], default_expiration_time_seconds: int, upload_to_presigned_link: Callable[ - [Path, AnyUrl, S3BucketName, S3ObjectKey], Awaitable[None] + [Path, str, S3BucketName, S3ObjectKey], Awaitable[None] ], ): - file = create_file_of_size(parse_obj_as(ByteSize, "1Mib")) + file = create_file_of_size(_BYTE_SIZE_ADAPTER.validate_python("1Mib")) s3_object_key = file.name presigned_url = await simcore_s3_api.create_single_presigned_upload_link( bucket=with_s3_bucket, @@ -769,7 +775,7 @@ async def test_create_single_presigned_upload_link_with_non_existing_bucket_rais create_file_of_size: Callable[[ByteSize], Path], default_expiration_time_seconds: int, ): - file = create_file_of_size(parse_obj_as(ByteSize, "1Mib")) + file = 
create_file_of_size(_BYTE_SIZE_ADAPTER.validate_python("1Mib")) s3_object_key = file.name with pytest.raises(S3BucketInvalidError): await simcore_s3_api.create_single_presigned_upload_link( @@ -863,7 +869,7 @@ async def test_create_multipart_presigned_upload_link_invalid_raises( object_key=faker.pystr(), file_size=ByteSize(file.stat().st_size), expiration_secs=default_expiration_time_seconds, - sha256_checksum=parse_obj_as(SHA256Str, faker.sha256()), + sha256_checksum=TypeAdapter(SHA256Str).validate_python(faker.sha256()), ) # completing with invalid bucket @@ -1076,7 +1082,7 @@ async def test_copy_file_invalid_raises( create_file_of_size: Callable[[ByteSize], Path], faker: Faker, ): - file = create_file_of_size(parse_obj_as(ByteSize, "1MiB")) + file = create_file_of_size(_BYTE_SIZE_ADAPTER.validate_python("1MiB")) uploaded_file = await upload_file(file) dst_object_key = faker.file_name() # NOTE: since aioboto3 13.1.0 this raises S3KeyNotFoundError instead of S3BucketInvalidError @@ -1101,9 +1107,9 @@ async def test_copy_file_invalid_raises( "directory_size, min_file_size, max_file_size", [ ( - parse_obj_as(ByteSize, "1Mib"), - parse_obj_as(ByteSize, "1B"), - parse_obj_as(ByteSize, "10Kib"), + _BYTE_SIZE_ADAPTER.validate_python("1Mib"), + _BYTE_SIZE_ADAPTER.validate_python("1B"), + _BYTE_SIZE_ADAPTER.validate_python("10Kib"), ) ], ids=byte_size_ids, @@ -1127,9 +1133,9 @@ async def test_get_directory_metadata( "directory_size, min_file_size, max_file_size", [ ( - parse_obj_as(ByteSize, "1Mib"), - parse_obj_as(ByteSize, "1B"), - parse_obj_as(ByteSize, "10Kib"), + _BYTE_SIZE_ADAPTER.validate_python("1Mib"), + _BYTE_SIZE_ADAPTER.validate_python("1B"), + _BYTE_SIZE_ADAPTER.validate_python("10Kib"), ) ], ids=byte_size_ids, @@ -1159,9 +1165,9 @@ async def test_get_directory_metadata_raises( "directory_size, min_file_size, max_file_size", [ ( - parse_obj_as(ByteSize, "1Mib"), - parse_obj_as(ByteSize, "1B"), - parse_obj_as(ByteSize, "10Kib"), + _BYTE_SIZE_ADAPTER.validate_python("1Mib"), + _BYTE_SIZE_ADAPTER.validate_python("1B"), + _BYTE_SIZE_ADAPTER.validate_python("10Kib"), ) ], ids=byte_size_ids, @@ -1195,9 +1201,9 @@ async def test_delete_file_recursively( "directory_size, min_file_size, max_file_size", [ ( - parse_obj_as(ByteSize, "1Mib"), - parse_obj_as(ByteSize, "1B"), - parse_obj_as(ByteSize, "10Kib"), + _BYTE_SIZE_ADAPTER.validate_python("1Mib"), + _BYTE_SIZE_ADAPTER.validate_python("1B"), + _BYTE_SIZE_ADAPTER.validate_python("10Kib"), ) ], ids=byte_size_ids, @@ -1233,9 +1239,9 @@ async def test_delete_file_recursively_raises( "directory_size, min_file_size, max_file_size", [ ( - parse_obj_as(ByteSize, "1Mib"), - parse_obj_as(ByteSize, "1B"), - parse_obj_as(ByteSize, "10Kib"), + _BYTE_SIZE_ADAPTER.validate_python("1Mib"), + _BYTE_SIZE_ADAPTER.validate_python("1B"), + _BYTE_SIZE_ADAPTER.validate_python("10Kib"), ) ], ids=byte_size_ids, @@ -1298,7 +1304,7 @@ def test_compute_s3_url( bucket: S3BucketName, object_key: S3ObjectKey, expected_s3_url: AnyUrl ): assert ( - SimcoreS3API.compute_s3_url(bucket=bucket, object_key=object_key) + str(SimcoreS3API.compute_s3_url(bucket=bucket, object_key=object_key)) == expected_s3_url ) @@ -1333,14 +1339,14 @@ def run_async_test(*args, **kwargs) -> None: "directory_size, min_file_size, max_file_size", [ ( - parse_obj_as(ByteSize, "1Mib"), - parse_obj_as(ByteSize, "1B"), - parse_obj_as(ByteSize, "10Kib"), + _BYTE_SIZE_ADAPTER.validate_python("1Mib"), + _BYTE_SIZE_ADAPTER.validate_python("1B"), + _BYTE_SIZE_ADAPTER.validate_python("10Kib"), ), ( - 
parse_obj_as(ByteSize, "500Mib"), - parse_obj_as(ByteSize, "10Mib"), - parse_obj_as(ByteSize, "50Mib"), + _BYTE_SIZE_ADAPTER.validate_python("500Mib"), + _BYTE_SIZE_ADAPTER.validate_python("10Mib"), + _BYTE_SIZE_ADAPTER.validate_python("50Mib"), ), ], ids=byte_size_ids, diff --git a/packages/aws-library/tests/test_s3_utils.py b/packages/aws-library/tests/test_s3_utils.py index 5354da8bc66..bf021978585 100644 --- a/packages/aws-library/tests/test_s3_utils.py +++ b/packages/aws-library/tests/test_s3_utils.py @@ -4,29 +4,73 @@ # pylint: disable=unused-variable +from typing import Final + import pytest from aws_library.s3._utils import ( _MULTIPART_MAX_NUMBER_OF_PARTS, _MULTIPART_UPLOADS_TARGET_MAX_PART_SIZE, compute_num_file_chunks, ) -from pydantic import ByteSize, parse_obj_as +from pydantic import ByteSize, TypeAdapter from pytest_simcore.helpers.parametrizations import byte_size_ids +_BYTE_SIZE_ADAPTER: Final[TypeAdapter[ByteSize]] = TypeAdapter(ByteSize) + @pytest.mark.parametrize( "file_size, expected_num_chunks, expected_chunk_size", [ - (parse_obj_as(ByteSize, "5Mib"), 1, parse_obj_as(ByteSize, "10Mib")), - (parse_obj_as(ByteSize, "10Mib"), 1, parse_obj_as(ByteSize, "10Mib")), - (parse_obj_as(ByteSize, "20Mib"), 2, parse_obj_as(ByteSize, "10Mib")), - (parse_obj_as(ByteSize, "50Mib"), 5, parse_obj_as(ByteSize, "10Mib")), - (parse_obj_as(ByteSize, "150Mib"), 15, parse_obj_as(ByteSize, "10Mib")), - (parse_obj_as(ByteSize, "550Mib"), 55, parse_obj_as(ByteSize, "10Mib")), - (parse_obj_as(ByteSize, "560Gib"), 5735, parse_obj_as(ByteSize, "100Mib")), - (parse_obj_as(ByteSize, "5Tib"), 8739, parse_obj_as(ByteSize, "600Mib")), - (parse_obj_as(ByteSize, "15Tib"), 7680, parse_obj_as(ByteSize, "2Gib")), - (parse_obj_as(ByteSize, "9431773844"), 900, parse_obj_as(ByteSize, "10Mib")), + ( + _BYTE_SIZE_ADAPTER.validate_python("5Mib"), + 1, + _BYTE_SIZE_ADAPTER.validate_python("10Mib"), + ), + ( + _BYTE_SIZE_ADAPTER.validate_python("10Mib"), + 1, + _BYTE_SIZE_ADAPTER.validate_python("10Mib"), + ), + ( + _BYTE_SIZE_ADAPTER.validate_python("20Mib"), + 2, + _BYTE_SIZE_ADAPTER.validate_python("10Mib"), + ), + ( + _BYTE_SIZE_ADAPTER.validate_python("50Mib"), + 5, + _BYTE_SIZE_ADAPTER.validate_python("10Mib"), + ), + ( + _BYTE_SIZE_ADAPTER.validate_python("150Mib"), + 15, + _BYTE_SIZE_ADAPTER.validate_python("10Mib"), + ), + ( + _BYTE_SIZE_ADAPTER.validate_python("550Mib"), + 55, + _BYTE_SIZE_ADAPTER.validate_python("10Mib"), + ), + ( + _BYTE_SIZE_ADAPTER.validate_python("560Gib"), + 5735, + _BYTE_SIZE_ADAPTER.validate_python("100Mib"), + ), + ( + _BYTE_SIZE_ADAPTER.validate_python("5Tib"), + 8739, + _BYTE_SIZE_ADAPTER.validate_python("600Mib"), + ), + ( + _BYTE_SIZE_ADAPTER.validate_python("15Tib"), + 7680, + _BYTE_SIZE_ADAPTER.validate_python("2Gib"), + ), + ( + _BYTE_SIZE_ADAPTER.validate_python("9431773844"), + 900, + _BYTE_SIZE_ADAPTER.validate_python("10Mib"), + ), ], ids=byte_size_ids, ) @@ -39,8 +83,7 @@ def test_compute_num_file_chunks( def test_enormous_file_size_raises_value_error(): - enormous_file_size = parse_obj_as( - ByteSize, + enormous_file_size = _BYTE_SIZE_ADAPTER.validate_python( ( max(_MULTIPART_UPLOADS_TARGET_MAX_PART_SIZE) * _MULTIPART_MAX_NUMBER_OF_PARTS diff --git a/packages/common-library/requirements/_test.in b/packages/common-library/requirements/_test.in index 4454d79d36a..1fe37ac0151 100644 --- a/packages/common-library/requirements/_test.in +++ b/packages/common-library/requirements/_test.in @@ -10,6 +10,7 @@ coverage faker +pydantic-settings pytest pytest-asyncio pytest-cov 
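`pydantic-settings` enters `_test.in` here because Pydantic v2 split `BaseSettings` out of the core `pydantic` package; `python-dotenv` then shows up as its transitive dependency in the compiled `_test.txt` below. A minimal, hypothetical sketch of the v2 settings API these requirements support (class and variable names are illustrative only):

```python
from pydantic import SecretStr
from pydantic_settings import BaseSettings, SettingsConfigDict


class ExampleSettings(BaseSettings):
    # values are read from EXAMPLE_-prefixed env vars or from a .env file
    # (the .env support is what pulls in python-dotenv)
    model_config = SettingsConfigDict(env_prefix="EXAMPLE_", env_file=".env")

    HOST: str = "localhost"
    PASSWORD: SecretStr | None = None


settings = ExampleSettings()  # e.g. EXAMPLE_HOST=db.local overrides the default
```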
diff --git a/packages/common-library/requirements/_test.txt b/packages/common-library/requirements/_test.txt index 2354abd790d..89b9a19eca6 100644 --- a/packages/common-library/requirements/_test.txt +++ b/packages/common-library/requirements/_test.txt @@ -1,8 +1,12 @@ +annotated-types==0.7.0 + # via + # -c requirements/_base.txt + # pydantic coverage==7.6.1 # via # -r requirements/_test.in # pytest-cov -faker==30.1.0 +faker==30.3.0 # via -r requirements/_test.in icdiff==2.0.7 # via pytest-icdiff @@ -16,6 +20,17 @@ pluggy==1.5.0 # via pytest pprintpp==0.4.0 # via pytest-icdiff +pydantic==2.9.2 + # via + # -c requirements/../../../requirements/constraints.txt + # -c requirements/_base.txt + # pydantic-settings +pydantic-core==2.23.4 + # via + # -c requirements/_base.txt + # pydantic +pydantic-settings==2.5.2 + # via -r requirements/_test.in pytest==8.3.3 # via # -r requirements/_test.in @@ -44,7 +59,9 @@ pytest-sugar==1.0.0 python-dateutil==2.9.0.post0 # via faker python-dotenv==1.0.1 - # via -r requirements/_test.in + # via + # -r requirements/_test.in + # pydantic-settings six==1.16.0 # via python-dateutil termcolor==2.5.0 @@ -53,3 +70,5 @@ typing-extensions==4.12.2 # via # -c requirements/_base.txt # faker + # pydantic + # pydantic-core diff --git a/packages/common-library/requirements/_tools.txt b/packages/common-library/requirements/_tools.txt index a333bb822ae..b5f85d4efcc 100644 --- a/packages/common-library/requirements/_tools.txt +++ b/packages/common-library/requirements/_tools.txt @@ -1,6 +1,6 @@ astroid==3.3.5 # via pylint -black==24.8.0 +black==24.10.0 # via -r requirements/../../../requirements/devenv.txt build==1.2.2.post1 # via pip-tools diff --git a/packages/common-library/src/common_library/errors_classes.py b/packages/common-library/src/common_library/errors_classes.py index 1438536d550..99ed586e744 100644 --- a/packages/common-library/src/common_library/errors_classes.py +++ b/packages/common-library/src/common_library/errors_classes.py @@ -1,3 +1,4 @@ +from typing import Any from pydantic.errors import PydanticErrorMixin @@ -15,8 +16,8 @@ def __new__(cls, *_args, **_kwargs): cls.code = cls._get_full_class_name() return super().__new__(cls) - def __init__(self, *_args, **kwargs) -> None: - self.__dict__ = kwargs + def __init__(self, **ctx: Any) -> None: + self.__dict__ = ctx super().__init__(message=self._build_message(), code=self.code) # type: ignore[arg-type] def __str__(self) -> str: diff --git a/packages/common-library/src/common_library/pydantic_networks_extension.py b/packages/common-library/src/common_library/pydantic_networks_extension.py index b53a2bfc8ae..79c5da906b1 100644 --- a/packages/common-library/src/common_library/pydantic_networks_extension.py +++ b/packages/common-library/src/common_library/pydantic_networks_extension.py @@ -1,5 +1,25 @@ from typing import Annotated, TypeAlias -from pydantic import AfterValidator, AnyHttpUrl +from pydantic import AfterValidator, AnyHttpUrl, AnyUrl, HttpUrl +from pydantic_core import Url -AnyHttpUrlLegacy: TypeAlias = Annotated[str, AnyHttpUrl, AfterValidator(lambda u: u.rstrip("/"))] + +def _strip_last_slash(url: Url) -> str: + return f"{url}".rstrip("/") + + +AnyUrlLegacy: TypeAlias = Annotated[ + AnyUrl, + AfterValidator(_strip_last_slash), +] + +AnyHttpUrlLegacy: TypeAlias = Annotated[ + AnyHttpUrl, + AfterValidator(_strip_last_slash), +] + + +HttpUrlLegacy: TypeAlias = Annotated[ + HttpUrl, + AfterValidator(_strip_last_slash), +] diff --git a/packages/common-library/src/common_library/pydantic_type_adapters.py 
b/packages/common-library/src/common_library/pydantic_type_adapters.py new file mode 100644 index 00000000000..883100c4717 --- /dev/null +++ b/packages/common-library/src/common_library/pydantic_type_adapters.py @@ -0,0 +1,8 @@ +from typing import Final + +from common_library.pydantic_networks_extension import AnyHttpUrlLegacy, AnyUrlLegacy +from pydantic import TypeAdapter + +AnyUrlLegacyAdapter: Final[TypeAdapter[AnyUrlLegacy]] = TypeAdapter(AnyUrlLegacy) + +AnyHttpUrlLegacyAdapter: Final[TypeAdapter[AnyHttpUrlLegacy]] = TypeAdapter(AnyHttpUrlLegacy) diff --git a/packages/common-library/src/common_library/serialization.py b/packages/common-library/src/common_library/serialization.py index 510bdf6a469..a2178cc3ae6 100644 --- a/packages/common-library/src/common_library/serialization.py +++ b/packages/common-library/src/common_library/serialization.py @@ -1,3 +1,4 @@ +from datetime import timedelta from typing import Any from common_library.pydantic_fields_extension import get_type @@ -15,6 +16,9 @@ def model_dump_with_secrets( field_data = data[field_name] + if isinstance(field_data, timedelta): + data[field_name] = field_data.total_seconds() + if isinstance(field_data, SecretStr): if show_secrets: data[field_name] = field_data.get_secret_value() diff --git a/packages/common-library/tests/test_errors_classes.py b/packages/common-library/tests/test_errors_classes.py index ae0ed8c1e3d..63674fbd3b4 100644 --- a/packages/common-library/tests/test_errors_classes.py +++ b/packages/common-library/tests/test_errors_classes.py @@ -38,8 +38,7 @@ class B12(B1, ValueError): def test_error_codes_and_msg_template(): class MyBaseError(OsparcErrorMixin, Exception): - def __init__(self, **ctx: Any) -> None: - super().__init__(**ctx) # Do not forget this for base exceptions! 
+ pass class MyValueError(MyBaseError, ValueError): msg_template = "Wrong value {value}" diff --git a/packages/common-library/tests/test_pydantic_fields_extension.py b/packages/common-library/tests/test_pydantic_fields_extension.py index 50ff5443c41..9f5aa1ae2fc 100644 --- a/packages/common-library/tests/test_pydantic_fields_extension.py +++ b/packages/common-library/tests/test_pydantic_fields_extension.py @@ -1,4 +1,4 @@ -from typing import Literal +from typing import Any, Callable, Literal import pytest from common_library.pydantic_fields_extension import get_type, is_literal, is_nullable @@ -68,5 +68,5 @@ class MyModel(BaseModel): (is_nullable, False, "e"), ], ) -def test_field_fn(fn, expected, name): +def test_field_fn(fn: Callable[[Any], Any], expected: Any, name: str): assert expected == fn(MyModel.model_fields[name]) diff --git a/packages/common-library/tests/test_pydantic_networks_extension.py b/packages/common-library/tests/test_pydantic_networks_extension.py index 3390f7c2acf..6ab50a42a2b 100644 --- a/packages/common-library/tests/test_pydantic_networks_extension.py +++ b/packages/common-library/tests/test_pydantic_networks_extension.py @@ -1,20 +1,39 @@ +import pytest from common_library.pydantic_networks_extension import AnyHttpUrlLegacy -from pydantic import AnyHttpUrl, TypeAdapter +from pydantic import AnyHttpUrl, BaseModel, TypeAdapter, ValidationError from pydantic_core import Url +class A(BaseModel): + url: AnyHttpUrlLegacy + + def test_any_http_url(): url = TypeAdapter(AnyHttpUrl).validate_python( "http://backgroud.testserver.io", ) assert isinstance(url, Url) - assert f"{url}" == "http://backgroud.testserver.io/" # NOTE: trailing '/' added in Pydantic v2 + assert ( + f"{url}" == "http://backgroud.testserver.io/" + ) # trailing slash added (in Pydantic v2) + def test_any_http_url_legacy(): url = TypeAdapter(AnyHttpUrlLegacy).validate_python( - "http://backgroud.testserver.io", + "http://backgroud.testserver.io", ) assert isinstance(url, str) - assert url == "http://backgroud.testserver.io" + assert url == "http://backgroud.testserver.io" # no trailing slash was added + + +def test_valid_any_http_url_legacy_field(): + a = A(url="http://backgroud.testserver.io") # type: ignore + + assert a.url == "http://backgroud.testserver.io" # no trailing slash was added + + +def test_not_valid_any_http_url_legacy_field(): + with pytest.raises(ValidationError): + A(url="htttttp://backgroud.testserver.io") # type: ignore diff --git a/packages/common-library/tests/test_serialization.py b/packages/common-library/tests/test_serialization.py index d53db58809c..d897ff5ec5d 100644 --- a/packages/common-library/tests/test_serialization.py +++ b/packages/common-library/tests/test_serialization.py @@ -1,5 +1,3 @@ -from typing import Final - import pytest from common_library.serialization import model_dump_with_secrets from pydantic import BaseModel, SecretStr @@ -10,9 +8,6 @@ class Credentials(BaseModel): PASSWORD: SecretStr | None = None -ME: Final[Credentials] = Credentials(USERNAME="DeepThought", PASSWORD=SecretStr("42")) - - @pytest.mark.parametrize( "expected,show_secrets", [ @@ -27,4 +22,4 @@ class Credentials(BaseModel): ], ) def test_model_dump_with_secrets(expected: dict, show_secrets: bool): - assert expected == model_dump_with_secrets(ME, show_secrets=show_secrets) + assert expected == model_dump_with_secrets(Credentials(USERNAME="DeepThought", PASSWORD=SecretStr("42")), show_secrets=show_secrets) diff --git a/packages/dask-task-models-library/requirements/_base.txt 
b/packages/dask-task-models-library/requirements/_base.txt index 327a04c5678..81807f5ffe6 100644 --- a/packages/dask-task-models-library/requirements/_base.txt +++ b/packages/dask-task-models-library/requirements/_base.txt @@ -1,3 +1,5 @@ +annotated-types==0.7.0 + # via pydantic arrow==1.3.0 # via -r requirements/../../../packages/models-library/requirements/_base.in attrs==24.2.0 @@ -65,7 +67,7 @@ partd==1.4.2 # via dask psutil==6.0.0 # via distributed -pydantic==1.10.17 +pydantic==2.9.1 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt @@ -73,10 +75,22 @@ pydantic==1.10.17 # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/_base.in + # pydantic-extra-types + # pydantic-settings +pydantic-core==2.23.3 + # via pydantic +pydantic-extra-types==2.9.0 + # via -r requirements/../../../packages/models-library/requirements/_base.in +pydantic-settings==2.5.2 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in pygments==2.18.0 # via rich python-dateutil==2.9.0.post0 # via arrow +python-dotenv==1.0.1 + # via pydantic-settings pyyaml==6.0.2 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -118,6 +132,7 @@ types-python-dateutil==2.9.0.20240821 typing-extensions==4.12.2 # via # pydantic + # pydantic-core # typer urllib3==2.2.2 # via diff --git a/packages/dask-task-models-library/requirements/_test.txt b/packages/dask-task-models-library/requirements/_test.txt index aa7e91d4d23..521d13265d9 100644 --- a/packages/dask-task-models-library/requirements/_test.txt +++ b/packages/dask-task-models-library/requirements/_test.txt @@ -4,8 +4,6 @@ coverage==7.6.1 # via # -r requirements/_test.in # pytest-cov -exceptiongroup==1.2.2 - # via pytest faker==27.0.0 # via -r requirements/_test.in flexcache==0.3 @@ -67,10 +65,6 @@ six==1.16.0 # python-dateutil termcolor==2.4.0 # via pytest-sugar -tomli==2.0.1 - # via - # coverage - # pytest typing-extensions==4.12.2 # via # -c requirements/_base.txt diff --git a/packages/dask-task-models-library/requirements/_tools.txt b/packages/dask-task-models-library/requirements/_tools.txt index b76f8083592..b9ee0a3c96d 100644 --- a/packages/dask-task-models-library/requirements/_tools.txt +++ b/packages/dask-task-models-library/requirements/_tools.txt @@ -70,22 +70,12 @@ ruff==0.6.1 # via -r requirements/../../../requirements/devenv.txt setuptools==73.0.1 # via pip-tools -tomli==2.0.1 - # via - # -c requirements/_test.txt - # black - # build - # mypy - # pip-tools - # pylint tomlkit==0.13.2 # via pylint typing-extensions==4.12.2 # via # -c requirements/_base.txt # -c requirements/_test.txt - # astroid - # black # mypy virtualenv==20.26.3 # via pre-commit diff --git a/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/docker.py b/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/docker.py index 4e9d36df3fb..b4fa976b665 100644 --- a/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/docker.py +++ b/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/docker.py @@ -1,4 +1,4 @@ -from pydantic import BaseModel, Extra, SecretStr +from 
pydantic import BaseModel, ConfigDict, SecretStr class DockerBasicAuth(BaseModel): @@ -6,9 +6,9 @@ class DockerBasicAuth(BaseModel): username: str password: SecretStr - class Config: - extra = Extra.forbid - schema_extra = { + model_config = ConfigDict( + extra="forbid", + json_schema_extra={ "examples": [ { "server_address": "docker.io", @@ -16,4 +16,5 @@ class Config: "password": "123456", } ] - } + }, + ) diff --git a/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/errors.py b/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/errors.py index f4060531f7f..f0a6813ba15 100644 --- a/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/errors.py +++ b/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/errors.py @@ -1,20 +1,20 @@ """ Dask task exceptions """ -from pydantic.errors import PydanticErrorMixin +from common_library.errors_classes import OsparcErrorMixin -class TaskValueError(PydanticErrorMixin, ValueError): - code = "task.value_error" +class TaskValueError(OsparcErrorMixin, ValueError): + code = "task.value_error" # type: ignore[assignment] -class TaskCancelledError(PydanticErrorMixin, RuntimeError): - code = "task.cancelled_error" +class TaskCancelledError(OsparcErrorMixin, RuntimeError): + code = "task.cancelled_error" # type: ignore[assignment] msg_template = "The task was cancelled" -class ServiceRuntimeError(PydanticErrorMixin, RuntimeError): - code = "service.runtime_error" +class ServiceRuntimeError(OsparcErrorMixin, RuntimeError): + code = "service.runtime_error" # type: ignore[assignment] msg_template = ( "The service {service_key}:{service_version}" " running in container {container_id} failed with code" diff --git a/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/events.py b/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/events.py index 1455c00cbff..a27bb027e94 100644 --- a/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/events.py +++ b/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/events.py @@ -1,10 +1,10 @@ import logging from abc import ABC, abstractmethod -from typing import Any, ClassVar, TypeAlias +from typing import TypeAlias import dask.typing from distributed.worker import get_worker -from pydantic import BaseModel, Extra, validator +from pydantic import BaseModel, ConfigDict, field_validator from .protocol import TaskOwner @@ -19,8 +19,7 @@ class BaseTaskEvent(BaseModel, ABC): def topic_name() -> str: raise NotImplementedError - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") def _dask_key_to_dask_task_id(key: dask.typing.Key) -> str: @@ -51,8 +50,8 @@ def from_dask_worker( task_owner=task_owner, ) - class Config(BaseTaskEvent.Config): - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ { "job_id": "simcore/services/comp/sleeper:1.1.0:projectid_ec7e595a-63ee-46a1-a04a-901b11b649f8:nodeid_39467d89-b659-4914-9359-c40b1b6d1d6d:uuid_5ee5c655-450d-4711-a3ec-32ffe16bc580", @@ -78,8 +77,9 @@ class Config(BaseTaskEvent.Config): }, ] } + ) - @validator("progress", always=True) + @field_validator("progress") @classmethod def ensure_between_0_1(cls, v): if 0 <= v <= 1: @@ -112,8 +112,8 @@ def from_dask_worker( task_owner=task_owner, ) - class Config(BaseTaskEvent.Config): - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + 
json_schema_extra={ "examples": [ { "job_id": "simcore/services/comp/sleeper:1.1.0:projectid_ec7e595a-63ee-46a1-a04a-901b11b649f8:nodeid_39467d89-b659-4914-9359-c40b1b6d1d6d:uuid_5ee5c655-450d-4711-a3ec-32ffe16bc580", @@ -129,3 +129,4 @@ class Config(BaseTaskEvent.Config): }, ] } + ) diff --git a/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/io.py b/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/io.py index 887397d4227..0bb95130723 100644 --- a/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/io.py +++ b/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/io.py @@ -1,7 +1,7 @@ import json from contextlib import suppress from pathlib import Path -from typing import Any, ClassVar, TypeAlias, Union +from typing import Any, TypeAlias, Union from models_library.basic_regex import MIME_TYPE_RE from models_library.generics import DictModel @@ -9,7 +9,7 @@ from pydantic import ( AnyUrl, BaseModel, - Extra, + ConfigDict, Field, StrictBool, StrictFloat, @@ -23,9 +23,9 @@ class PortSchema(BaseModel): required: bool - class Config: - extra = Extra.forbid - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + extra="forbid", + json_schema_extra={ "examples": [ { "required": True, @@ -34,15 +34,16 @@ class Config: "required": False, }, ] - } + }, + ) class FilePortSchema(PortSchema): mapping: str | None = None url: AnyUrl - class Config(PortSchema.Config): - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ { "mapping": "some_filename.txt", @@ -55,6 +56,7 @@ class Config(PortSchema.Config): }, ] } + ) class FileUrl(BaseModel): @@ -64,12 +66,12 @@ class FileUrl(BaseModel): description="Local file relpath name (if given), otherwise it takes the url filename", ) file_mime_type: str | None = Field( - default=None, description="the file MIME type", regex=MIME_TYPE_RE + default=None, description="the file MIME type", pattern=MIME_TYPE_RE ) - class Config: - extra = Extra.forbid - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + extra="forbid", + json_schema_extra={ "examples": [ {"url": "https://some_file_url", "file_mime_type": "application/json"}, { @@ -78,7 +80,8 @@ class Config: "file_mime_type": "application/json", }, ] - } + }, + ) PortValue: TypeAlias = Union[ @@ -94,8 +97,8 @@ class Config: class TaskInputData(DictModel[ServicePortKey, PortValue]): - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ { "boolean_input": False, @@ -106,6 +109,7 @@ class Config: }, ] } + ) PortSchemaValue: TypeAlias = Union[PortSchema, FilePortSchema] @@ -118,8 +122,8 @@ class TaskOutputDataSchema(DictModel[ServicePortKey, PortSchemaValue]): # does not work well in that case. 
For that reason, the schema is # sent as a json-schema instead of with a dynamically-created model class # - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ { "boolean_output": {"required": False}, @@ -138,6 +142,7 @@ class Config: }, ] } + ) class TaskOutputData(DictModel[ServicePortKey, PortValue]): @@ -170,10 +175,10 @@ def from_task_output( msg = f"Could not locate '{output_key}' in {output_data_file}" raise ValueError(msg) - return cls.parse_obj(data) + return cls.model_validate(data) - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ { "boolean_output": False, @@ -184,3 +189,4 @@ class Config: }, ] } + ) diff --git a/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/protocol.py b/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/protocol.py index 00f89d96d94..fd6acf554e0 100644 --- a/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/protocol.py +++ b/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/protocol.py @@ -1,4 +1,4 @@ -from typing import Any, ClassVar, Protocol, TypeAlias +from typing import Any, Protocol, TypeAlias from models_library.basic_types import EnvVarKey from models_library.docker import DockerLabelKey @@ -6,7 +6,7 @@ from models_library.projects_nodes_io import NodeID from models_library.services_resources import BootMode from models_library.users import UserID -from pydantic import AnyUrl, BaseModel, root_validator +from pydantic import AnyUrl, BaseModel, ConfigDict, model_validator from settings_library.s3 import S3Settings from .docker import DockerBasicAuth @@ -32,7 +32,7 @@ class TaskOwner(BaseModel): def has_parent(self) -> bool: return bool(self.parent_node_id and self.parent_project_id) - @root_validator + @model_validator(mode="before") @classmethod def check_parent_valid(cls, values: dict[str, Any]) -> dict[str, Any]: parent_project_id = values.get("parent_project_id") @@ -44,8 +44,8 @@ def check_parent_valid(cls, values: dict[str, Any]) -> dict[str, Any]: raise ValueError(msg) return values - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ { "user_id": 32, @@ -63,6 +63,7 @@ class Config: }, ] } + ) class ContainerTaskParameters(BaseModel): @@ -76,24 +77,23 @@ class ContainerTaskParameters(BaseModel): boot_mode: BootMode task_owner: TaskOwner - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ { "image": "ubuntu", "tag": "latest", - "input_data": TaskInputData.Config.schema_extra["examples"][0], - "output_data_keys": TaskOutputDataSchema.Config.schema_extra[ - "examples" - ][0], + "input_data": TaskInputData.model_config["json_schema_extra"]["examples"][0], # type: ignore[index] + "output_data_keys": TaskOutputDataSchema.model_config["json_schema_extra"]["examples"][0], # type: ignore[index] "command": ["sleep 10", "echo hello"], "envs": {"MYENV": "is an env"}, "labels": {"io.simcore.thelabel": "is amazing"}, "boot_mode": BootMode.CPU.value, - "task_owner": TaskOwner.Config.schema_extra["examples"][0], + "task_owner": TaskOwner.model_config["json_schema_extra"]["examples"][0], # type: ignore[index] }, ] } + ) class ContainerRemoteFct(Protocol): diff --git a/packages/dask-task-models-library/tests/container_tasks/test_docker.py 
b/packages/dask-task-models-library/tests/container_tasks/test_docker.py index 307fe175547..4eb5bc74980 100644 --- a/packages/dask-task-models-library/tests/container_tasks/test_docker.py +++ b/packages/dask-task-models-library/tests/container_tasks/test_docker.py @@ -4,7 +4,7 @@ @pytest.mark.parametrize("model_cls", [(DockerBasicAuth)]) def test_docker_models_examples(model_cls): - examples = model_cls.Config.schema_extra["examples"] + examples = model_cls.model_config["json_schema_extra"]["examples"] for index, example in enumerate(examples): print(f"{index:-^10}:\n", example) diff --git a/packages/dask-task-models-library/tests/container_tasks/test_events.py b/packages/dask-task-models-library/tests/container_tasks/test_events.py index 16a308e11e0..1aa4139720d 100644 --- a/packages/dask-task-models-library/tests/container_tasks/test_events.py +++ b/packages/dask-task-models-library/tests/container_tasks/test_events.py @@ -26,7 +26,7 @@ def test_task_event_abstract(): @pytest.mark.parametrize("model_cls", [TaskProgressEvent, TaskLogEvent]) def test_events_models_examples(model_cls): - examples = model_cls.Config.schema_extra["examples"] + examples = model_cls.model_config["json_schema_extra"]["examples"] for index, example in enumerate(examples): print(f"{index:-^10}:\n", example) @@ -51,7 +51,7 @@ def mocked_dask_worker_job_id(mocker: MockerFixture, job_id: str) -> str: return job_id -@pytest.fixture(params=TaskOwner.Config.schema_extra["examples"]) +@pytest.fixture(params=TaskOwner.model_config["json_schema_extra"]["examples"]) def task_owner(request: pytest.FixtureRequest) -> TaskOwner: return TaskOwner(**request.param) diff --git a/packages/dask-task-models-library/tests/container_tasks/test_io.py b/packages/dask-task-models-library/tests/container_tasks/test_io.py index 14527d92391..db6357d930c 100644 --- a/packages/dask-task-models-library/tests/container_tasks/test_io.py +++ b/packages/dask-task-models-library/tests/container_tasks/test_io.py @@ -30,7 +30,7 @@ def test_io_models_examples(model_cls, model_cls_examples): for name, example in model_cls_examples.items(): print(name, ":", pformat(example)) - model_instance = model_cls.parse_obj(example) + model_instance = model_cls.model_validate(example) assert model_instance, f"Failed with {name}" print(name, ":", model_instance) @@ -69,9 +69,11 @@ def _create_fake_outputs( def test_create_task_output_from_task_with_optional_fields_as_required( tmp_path: Path, optional_fields_set: bool, faker: Faker ): - for schema_example in TaskOutputDataSchema.Config.schema_extra["examples"]: + for schema_example in TaskOutputDataSchema.model_config["json_schema_extra"][ + "examples" + ]: - task_output_schema = TaskOutputDataSchema.parse_obj(schema_example) + task_output_schema = TaskOutputDataSchema.model_validate(schema_example) outputs_file_name = _create_fake_outputs( task_output_schema, tmp_path, optional_fields_set, faker ) @@ -92,7 +94,7 @@ def test_create_task_output_from_task_with_optional_fields_as_required( def test_create_task_output_from_task_throws_when_there_are_missing_files( tmp_path: Path, faker: Faker ): - task_output_schema = TaskOutputDataSchema.parse_obj( + task_output_schema = TaskOutputDataSchema.model_validate( { "required_file_output": { "required": True, @@ -113,7 +115,7 @@ def test_create_task_output_from_task_throws_when_there_are_missing_files( def test_create_task_output_from_task_does_not_throw_when_there_are_optional_missing_files( tmp_path: Path, faker: Faker ): - task_output_schema = 
TaskOutputDataSchema.parse_obj( + task_output_schema = TaskOutputDataSchema.model_validate( { "optional_file_output": { "required": False, @@ -134,7 +136,7 @@ def test_create_task_output_from_task_does_not_throw_when_there_are_optional_mis def test_create_task_output_from_task_throws_when_there_are_entries( tmp_path: Path, faker: Faker ): - task_output_schema = TaskOutputDataSchema.parse_obj( + task_output_schema = TaskOutputDataSchema.model_validate( { "some_output": { "required": True, @@ -153,7 +155,7 @@ def test_create_task_output_from_task_throws_when_there_are_entries( def test_create_task_output_from_task_does_not_throw_when_there_are_optional_entries( tmp_path: Path, faker: Faker ): - task_output_schema = TaskOutputDataSchema.parse_obj( + task_output_schema = TaskOutputDataSchema.model_validate( { "some_output": { "required": False, @@ -182,6 +184,6 @@ def test_objects_are_compatible_with_dask_requirements(model_cls, model_cls_exam for name, example in model_cls_examples.items(): print(name, ":", pformat(example)) - model_instance = model_cls.parse_obj(example) + model_instance = model_cls.model_validate(example) reloaded_instance = loads(dumps(model_instance)) assert reloaded_instance == model_instance diff --git a/packages/dask-task-models-library/tests/container_tasks/test_protocol.py b/packages/dask-task-models-library/tests/container_tasks/test_protocol.py index d17202adabd..3c70924a043 100644 --- a/packages/dask-task-models-library/tests/container_tasks/test_protocol.py +++ b/packages/dask-task-models-library/tests/container_tasks/test_protocol.py @@ -9,7 +9,7 @@ @pytest.mark.parametrize("model_cls", [TaskOwner, ContainerTaskParameters]) def test_events_models_examples(model_cls): - examples = model_cls.Config.schema_extra["examples"] + examples = model_cls.model_config["json_schema_extra"]["examples"] for index, example in enumerate(examples): print(f"{index:-^10}:\n", example) @@ -19,7 +19,9 @@ def test_events_models_examples(model_cls): def test_task_owner_parent_valid(faker: Faker): - invalid_task_owner_example = TaskOwner.Config.schema_extra["examples"][0] + invalid_task_owner_example = TaskOwner.model_config["json_schema_extra"][ + "examples" + ][0] invalid_task_owner_example["parent_project_id"] = faker.uuid4() assert invalid_task_owner_example["parent_node_id"] is None with pytest.raises(ValidationError, match=r".+ are None or both are set!"): diff --git a/packages/models-library/requirements/_base.txt b/packages/models-library/requirements/_base.txt index 03e1b39d677..f07b0ddd44b 100644 --- a/packages/models-library/requirements/_base.txt +++ b/packages/models-library/requirements/_base.txt @@ -20,13 +20,13 @@ orjson==3.10.7 # via # -c requirements/../../../requirements/constraints.txt # -r requirements/_base.in -pydantic==2.9.1 +pydantic==2.9.2 # via # -c requirements/../../../requirements/constraints.txt # -r requirements/_base.in # pydantic-extra-types # pydantic-settings -pydantic-core==2.23.3 +pydantic-core==2.23.4 # via pydantic pydantic-extra-types==2.9.0 # via -r requirements/_base.in diff --git a/packages/models-library/scripts/validate-pg-projects.py b/packages/models-library/scripts/validate-pg-projects.py index 978e32cfc6f..648b6846876 100644 --- a/packages/models-library/scripts/validate-pg-projects.py +++ b/packages/models-library/scripts/validate-pg-projects.py @@ -59,12 +59,12 @@ def validate_csv_exported_pg_project( pid = row.get("uuid", index + 1) try: - model = ProjectFromCsv.parse_obj(row) + model = ProjectFromCsv.model_validate(row) if verbose > 
1: typer.secho(f"{pid} OK", fg=typer.colors.GREEN) if verbose > 2: - typer.echo(model.json(indent=2)) + typer.echo(model.model_dump_json(indent=2)) except ValidationError as err: failed.append(pid) typer.secho( diff --git a/packages/models-library/src/models_library/api_schemas_catalog/__init__.py b/packages/models-library/src/models_library/api_schemas_catalog/__init__.py index 84d761729a4..2e8c8f75a24 100644 --- a/packages/models-library/src/models_library/api_schemas_catalog/__init__.py +++ b/packages/models-library/src/models_library/api_schemas_catalog/__init__.py @@ -1,7 +1,9 @@ from typing import Final -from pydantic import parse_obj_as +from pydantic import TypeAdapter from ..rabbitmq_basic_types import RPCNamespace -CATALOG_RPC_NAMESPACE: Final[RPCNamespace] = parse_obj_as(RPCNamespace, "catalog") +CATALOG_RPC_NAMESPACE: Final[RPCNamespace] = TypeAdapter(RPCNamespace).validate_python( + "catalog" +) diff --git a/packages/models-library/src/models_library/api_schemas_clusters_keeper/__init__.py b/packages/models-library/src/models_library/api_schemas_clusters_keeper/__init__.py index b6570d01c89..79be28f2021 100644 --- a/packages/models-library/src/models_library/api_schemas_clusters_keeper/__init__.py +++ b/packages/models-library/src/models_library/api_schemas_clusters_keeper/__init__.py @@ -1,9 +1,9 @@ from typing import Final -from pydantic import parse_obj_as +from pydantic import TypeAdapter from ..rabbitmq_basic_types import RPCNamespace -CLUSTERS_KEEPER_RPC_NAMESPACE: Final[RPCNamespace] = parse_obj_as( - RPCNamespace, "clusters-keeper" -) +CLUSTERS_KEEPER_RPC_NAMESPACE: Final[RPCNamespace] = TypeAdapter( + RPCNamespace +).validate_python("clusters-keeper") diff --git a/packages/models-library/src/models_library/api_schemas_directorv2/dynamic_services_service.py b/packages/models-library/src/models_library/api_schemas_directorv2/dynamic_services_service.py index f0958695e15..d103a3ea8c5 100644 --- a/packages/models-library/src/models_library/api_schemas_directorv2/dynamic_services_service.py +++ b/packages/models-library/src/models_library/api_schemas_directorv2/dynamic_services_service.py @@ -33,7 +33,7 @@ class CommonServiceDetails(BaseModel): class ServiceDetails(CommonServiceDetails): - basepath: Path = Field( + basepath: Path | None = Field( default=None, description="predefined path where the dynamic service should be served. 
If empty, the service shall use the root endpoint.", alias="service_basepath", @@ -68,7 +68,7 @@ class RunningDynamicServiceDetails(ServiceDetails): internal_port: PortInt = Field( ..., description="the service swarm internal port", alias="service_port" ) - published_port: PortInt = Field( + published_port: PortInt | None = Field( default=None, description="the service swarm published port if any", deprecated=True, diff --git a/packages/models-library/src/models_library/api_schemas_dynamic_scheduler/__init__.py b/packages/models-library/src/models_library/api_schemas_dynamic_scheduler/__init__.py index 5631d38e5f9..70a4f1247ba 100644 --- a/packages/models-library/src/models_library/api_schemas_dynamic_scheduler/__init__.py +++ b/packages/models-library/src/models_library/api_schemas_dynamic_scheduler/__init__.py @@ -1,9 +1,9 @@ from typing import Final -from pydantic import parse_obj_as +from pydantic import TypeAdapter from ..rabbitmq_basic_types import RPCNamespace -DYNAMIC_SCHEDULER_RPC_NAMESPACE: Final[RPCNamespace] = parse_obj_as( - RPCNamespace, "dynamic-scheduler" -) +DYNAMIC_SCHEDULER_RPC_NAMESPACE: Final[RPCNamespace] = TypeAdapter( + RPCNamespace +).validate_python("dynamic-scheduler") diff --git a/packages/models-library/src/models_library/api_schemas_efs_guardian/__init__.py b/packages/models-library/src/models_library/api_schemas_efs_guardian/__init__.py index 50793febaf9..f47a9a3f8d3 100644 --- a/packages/models-library/src/models_library/api_schemas_efs_guardian/__init__.py +++ b/packages/models-library/src/models_library/api_schemas_efs_guardian/__init__.py @@ -1,9 +1,9 @@ from typing import Final -from pydantic import parse_obj_as +from pydantic import TypeAdapter from ..rabbitmq_basic_types import RPCNamespace -EFS_GUARDIAN_RPC_NAMESPACE: Final[RPCNamespace] = parse_obj_as( - RPCNamespace, "efs-guardian" -) +EFS_GUARDIAN_RPC_NAMESPACE: Final[RPCNamespace] = TypeAdapter( + RPCNamespace +).validate_python("efs-guardian") diff --git a/packages/models-library/src/models_library/api_schemas_payments/__init__.py b/packages/models-library/src/models_library/api_schemas_payments/__init__.py index 30d68367ded..73928d6ccd7 100644 --- a/packages/models-library/src/models_library/api_schemas_payments/__init__.py +++ b/packages/models-library/src/models_library/api_schemas_payments/__init__.py @@ -1,7 +1,9 @@ from typing import Final -from pydantic import parse_obj_as +from pydantic import TypeAdapter from ..rabbitmq_basic_types import RPCNamespace -PAYMENTS_RPC_NAMESPACE: Final[RPCNamespace] = parse_obj_as(RPCNamespace, "payments") +PAYMENTS_RPC_NAMESPACE: Final[RPCNamespace] = TypeAdapter(RPCNamespace).validate_python( + "payments" +) diff --git a/packages/models-library/src/models_library/api_schemas_resource_usage_tracker/__init__.py b/packages/models-library/src/models_library/api_schemas_resource_usage_tracker/__init__.py index 295897e5b1d..d32b474edf6 100644 --- a/packages/models-library/src/models_library/api_schemas_resource_usage_tracker/__init__.py +++ b/packages/models-library/src/models_library/api_schemas_resource_usage_tracker/__init__.py @@ -1,9 +1,9 @@ from typing import Final -from pydantic import parse_obj_as +from pydantic import TypeAdapter from ..rabbitmq_basic_types import RPCNamespace -RESOURCE_USAGE_TRACKER_RPC_NAMESPACE: Final[RPCNamespace] = parse_obj_as( - RPCNamespace, "resource-usage-tracker" -) +RESOURCE_USAGE_TRACKER_RPC_NAMESPACE: Final[RPCNamespace] = TypeAdapter( + RPCNamespace +).validate_python("resource-usage-tracker") diff --git 
a/packages/models-library/src/models_library/api_schemas_storage.py b/packages/models-library/src/models_library/api_schemas_storage.py
index fac13d42d40..bd0185a9173 100644
--- a/packages/models-library/src/models_library/api_schemas_storage.py
+++ b/packages/models-library/src/models_library/api_schemas_storage.py
@@ -37,10 +37,11 @@
 ETag: TypeAlias = str
-
 S3BucketName: TypeAlias = Annotated[str, StringConstraints(pattern=S3_BUCKET_NAME_RE)]

-DatCoreDatasetName: TypeAlias = Annotated[str, StringConstraints(pattern=DATCORE_DATASET_NAME_RE)]
+DatCoreDatasetName: TypeAlias = Annotated[
+    str, StringConstraints(pattern=DATCORE_DATASET_NAME_RE)
+]


 # /
diff --git a/packages/models-library/src/models_library/api_schemas_webserver/__init__.py b/packages/models-library/src/models_library/api_schemas_webserver/__init__.py
index f30e0f0790d..c95f68ab78c 100644
--- a/packages/models-library/src/models_library/api_schemas_webserver/__init__.py
+++ b/packages/models-library/src/models_library/api_schemas_webserver/__init__.py
@@ -1,7 +1,9 @@
 from typing import Final

-from pydantic import parse_obj_as
+from pydantic import TypeAdapter

 from ..rabbitmq_basic_types import RPCNamespace

-WEBSERVER_RPC_NAMESPACE: Final[RPCNamespace] = parse_obj_as(RPCNamespace, "webserver")
+WEBSERVER_RPC_NAMESPACE: Final[RPCNamespace] = TypeAdapter(
+    RPCNamespace
+).validate_python("webserver")
diff --git a/packages/models-library/src/models_library/api_schemas_webserver/projects_nodes.py b/packages/models-library/src/models_library/api_schemas_webserver/projects_nodes.py
index cda166e0d13..02fabd46f7a 100644
--- a/packages/models-library/src/models_library/api_schemas_webserver/projects_nodes.py
+++ b/packages/models-library/src/models_library/api_schemas_webserver/projects_nodes.py
@@ -103,7 +103,7 @@ class NodeGet(OutputSchema):
                 "service_basepath": "/x/E1O2E-LAH",
                 "service_state": "pending",
                 "service_message": "no suitable node (insufficient resources on 1 node)",
-                "user_id": 123,
+                "user_id": "123",
             }
         }
     )
diff --git a/packages/models-library/src/models_library/basic_regex.py b/packages/models-library/src/models_library/basic_regex.py
index 51441fe39e6..b65c0fd1fe1 100644
--- a/packages/models-library/src/models_library/basic_regex.py
+++ b/packages/models-library/src/models_library/basic_regex.py
@@ -49,7 +49,9 @@
 SIMCORE_S3_DIRECTORY_ID_RE = rf"^({UUID_RE_BASE})\/({UUID_RE_BASE})\/(.+)\/$"

 # S3 - AWS bucket names [https://docs.aws.amazon.com/AmazonS3/latest/userguide/bucketnamingrules.html]
-S3_BUCKET_NAME_RE = r"(?!(^xn--|-s3alias$))^[a-z0-9][a-z0-9-]{1,61}[a-z0-9]$"
+S3_BUCKET_NAME_RE = re.compile(
+    r"^(?!xn--)[a-z0-9][a-z0-9-]{1,61}[a-z0-9]$(?
"DockerLabelKey": # NOTE: https://docs.docker.com/engine/reference/commandline/tag/#description -DockerGenericTag: TypeAlias = Annotated[str, StringConstraints(pattern=DOCKER_GENERIC_TAG_KEY_RE)] +DockerGenericTag: TypeAlias = Annotated[ + str, StringConstraints(pattern=DOCKER_GENERIC_TAG_KEY_RE) +] class DockerPlacementConstraint(ConstrainedStr): @@ -139,7 +141,7 @@ def to_simcore_runtime_docker_labels(self) -> dict[DockerLabelKey, str]: """returns a dictionary of strings as required by docker""" return { to_simcore_runtime_docker_label_key(k): f"{v}" - for k, v in sorted(self.dict().items()) + for k, v in sorted(self.model_dump().items()) } @classmethod diff --git a/packages/models-library/src/models_library/function_services_catalog/_utils.py b/packages/models-library/src/models_library/function_services_catalog/_utils.py index 4cd1275b5e0..a58a524d094 100644 --- a/packages/models-library/src/models_library/function_services_catalog/_utils.py +++ b/packages/models-library/src/models_library/function_services_catalog/_utils.py @@ -14,10 +14,10 @@ "email": "unknown@osparc.io", "affiliation": "unknown", } -EN = Author.parse_obj(AUTHORS.get("EN", _DEFAULT)) -OM = Author.parse_obj(AUTHORS.get("OM", _DEFAULT)) -PC = Author.parse_obj(AUTHORS.get("PC", _DEFAULT)) -WVG = Author.parse_obj(AUTHORS.get("WVG", _DEFAULT)) +EN = Author.model_validate(AUTHORS.get("EN", _DEFAULT)) +OM = Author.model_validate(AUTHORS.get("OM", _DEFAULT)) +PC = Author.model_validate(AUTHORS.get("PC", _DEFAULT)) +WVG = Author.model_validate(AUTHORS.get("WVG", _DEFAULT)) def create_fake_thumbnail_url(label: str) -> str: diff --git a/packages/models-library/src/models_library/function_services_catalog/services/demo_units.py b/packages/models-library/src/models_library/function_services_catalog/services/demo_units.py index 298ac02c82b..44bd30e0899 100644 --- a/packages/models-library/src/models_library/function_services_catalog/services/demo_units.py +++ b/packages/models-library/src/models_library/function_services_catalog/services/demo_units.py @@ -15,7 +15,7 @@ # If this assumption cannot be guaranteed anymore the test must be updated. 
# -META = ServiceMetaDataPublished.parse_obj( +META = ServiceMetaDataPublished.model_validate( { "integration-version": LATEST_INTEGRATION_VERSION, "key": f"{FUNCTION_SERVICE_KEY_PREFIX}/data-iterator/demo-units", diff --git a/packages/models-library/src/models_library/function_services_catalog/services/file_picker.py b/packages/models-library/src/models_library/function_services_catalog/services/file_picker.py index 0e0554842fb..2245a8ba3ff 100644 --- a/packages/models-library/src/models_library/function_services_catalog/services/file_picker.py +++ b/packages/models-library/src/models_library/function_services_catalog/services/file_picker.py @@ -8,7 +8,7 @@ from .._key_labels import FUNCTION_SERVICE_KEY_PREFIX from .._utils import OM, FunctionServices -META: Final = ServiceMetaDataPublished.parse_obj( +META: Final = ServiceMetaDataPublished.model_validate( { "integration-version": LATEST_INTEGRATION_VERSION, "key": f"{FUNCTION_SERVICE_KEY_PREFIX}/file-picker", diff --git a/packages/models-library/src/models_library/function_services_catalog/services/iter_range.py b/packages/models-library/src/models_library/function_services_catalog/services/iter_range.py index 662cbf327cf..d59e37735e8 100644 --- a/packages/models-library/src/models_library/function_services_catalog/services/iter_range.py +++ b/packages/models-library/src/models_library/function_services_catalog/services/iter_range.py @@ -12,7 +12,7 @@ def create_metadata( ) -> ServiceMetaDataPublished: prefix = prefix or type_name LABEL = f"{type_name.capitalize()} iterator" - return ServiceMetaDataPublished.parse_obj( + return ServiceMetaDataPublished.model_validate( { "integration-version": LATEST_INTEGRATION_VERSION, "key": f"{FUNCTION_SERVICE_KEY_PREFIX}/data-iterator/{prefix}-range", diff --git a/packages/models-library/src/models_library/function_services_catalog/services/iter_sensitivity.py b/packages/models-library/src/models_library/function_services_catalog/services/iter_sensitivity.py index f0199389885..a2be976c651 100644 --- a/packages/models-library/src/models_library/function_services_catalog/services/iter_sensitivity.py +++ b/packages/models-library/src/models_library/function_services_catalog/services/iter_sensitivity.py @@ -13,7 +13,7 @@ LIST_NUMBERS_SCHEMA: dict[str, Any] = schema_of(list[float], title="list[number]") -META = ServiceMetaDataPublished.parse_obj( +META = ServiceMetaDataPublished.model_validate( { "integration-version": LATEST_INTEGRATION_VERSION, "key": f"{FUNCTION_SERVICE_KEY_PREFIX}/data-iterator/sensitivity", diff --git a/packages/models-library/src/models_library/function_services_catalog/services/nodes_group.py b/packages/models-library/src/models_library/function_services_catalog/services/nodes_group.py index bfde87e52c3..40adb28f342 100644 --- a/packages/models-library/src/models_library/function_services_catalog/services/nodes_group.py +++ b/packages/models-library/src/models_library/function_services_catalog/services/nodes_group.py @@ -7,7 +7,7 @@ # NOTE: DO not mistake with simcore/services/frontend/nodes-group/macros/ # which needs to be redefined. 
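# NOTE (editor): aside, not part of this patch. For types that are not
# BaseModel subclasses (PortInt, DockerLabelKey, the RPC namespaces above,
# ...), v1's parse_obj_as(T, v) becomes TypeAdapter(T).validate_python(v).
# Sketch with an invented stand-in alias, assuming pydantic>=2:

from typing import Annotated

from pydantic import Field, TypeAdapter

ExamplePortInt = Annotated[int, Field(gt=0, lt=65536)]  # stand-in for PortInt

# v1 equivalent: parse_obj_as(ExamplePortInt, 53)
assert TypeAdapter(ExamplePortInt).validate_python(53) == 53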
# -META = ServiceMetaDataPublished.parse_obj( +META = ServiceMetaDataPublished.model_validate( { "integration-version": LATEST_INTEGRATION_VERSION, "key": f"{FUNCTION_SERVICE_KEY_PREFIX}/nodes-group", diff --git a/packages/models-library/src/models_library/function_services_catalog/services/parameters.py b/packages/models-library/src/models_library/function_services_catalog/services/parameters.py index e0e25b6ee11..d62a4a88dfb 100644 --- a/packages/models-library/src/models_library/function_services_catalog/services/parameters.py +++ b/packages/models-library/src/models_library/function_services_catalog/services/parameters.py @@ -12,7 +12,7 @@ def _create_metadata(type_name: str) -> ServiceMetaDataPublished: This is a parametrized node (or param-node in short) """ - meta = ServiceMetaDataPublished.parse_obj( + meta = ServiceMetaDataPublished.model_validate( { "integration-version": LATEST_INTEGRATION_VERSION, "key": f"{FUNCTION_SERVICE_KEY_PREFIX}/parameter/{type_name}", @@ -45,7 +45,7 @@ def _create_metadata(type_name: str) -> ServiceMetaDataPublished: META_BOOL: Final = _create_metadata(type_name="boolean") META_INT: Final = _create_metadata(type_name="integer") META_STR: Final = _create_metadata(type_name="string") -META_ARRAY: Final = ServiceMetaDataPublished.parse_obj( +META_ARRAY: Final = ServiceMetaDataPublished.model_validate( { "integration-version": LATEST_INTEGRATION_VERSION, "key": f"{FUNCTION_SERVICE_KEY_PREFIX}/parameter/array", diff --git a/packages/models-library/src/models_library/function_services_catalog/services/probes.py b/packages/models-library/src/models_library/function_services_catalog/services/probes.py index e736efb2fb1..4c710a90ade 100644 --- a/packages/models-library/src/models_library/function_services_catalog/services/probes.py +++ b/packages/models-library/src/models_library/function_services_catalog/services/probes.py @@ -7,7 +7,7 @@ def _create_metadata(type_name: str) -> ServiceMetaDataPublished: - obj: ServiceMetaDataPublished = ServiceMetaDataPublished.parse_obj( + obj: ServiceMetaDataPublished = ServiceMetaDataPublished.model_validate( { "integration-version": LATEST_INTEGRATION_VERSION, "key": f"{FUNCTION_SERVICE_KEY_PREFIX}/iterator-consumer/probe/{type_name}", @@ -38,7 +38,7 @@ def _create_metadata(type_name: str) -> ServiceMetaDataPublished: META_BOOL: Final = _create_metadata("boolean") META_INT: Final = _create_metadata("integer") META_STR: Final = _create_metadata("string") -META_ARRAY: Final = ServiceMetaDataPublished.parse_obj( +META_ARRAY: Final = ServiceMetaDataPublished.model_validate( { "integration-version": LATEST_INTEGRATION_VERSION, "key": f"{FUNCTION_SERVICE_KEY_PREFIX}/iterator-consumer/probe/array", @@ -67,7 +67,7 @@ def _create_metadata(type_name: str) -> ServiceMetaDataPublished: } ) -META_FILE: Final = ServiceMetaDataPublished.parse_obj( +META_FILE: Final = ServiceMetaDataPublished.model_validate( { "integration-version": LATEST_INTEGRATION_VERSION, "key": f"{FUNCTION_SERVICE_KEY_PREFIX}/iterator-consumer/probe/file", diff --git a/packages/models-library/src/models_library/projects.py b/packages/models-library/src/models_library/projects.py index 440bfe915bb..120e54d899d 100644 --- a/packages/models-library/src/models_library/projects.py +++ b/packages/models-library/src/models_library/projects.py @@ -1,7 +1,6 @@ """ Models a study's project document """ -from copy import deepcopy from datetime import datetime from enum import Enum from typing import Any, Final, TypeAlias @@ -172,16 +171,7 @@ class Project(BaseProjectModel): 
alias="workspaceId", ) - def _patch_json_schema_extra(self, schema: dict) -> None: - # Patch to allow jsonschema nullable - # SEE https://github.com/samuelcolvin/pydantic/issues/990#issuecomment-645961530 - state_pydantic_schema = deepcopy(schema["properties"]["state"]) - schema["properties"]["state"] = { - "anyOf": [{"type": "null"}, state_pydantic_schema] - } - model_config = ConfigDict( title="osparc-simcore project", extra="forbid", - json_schema_extra=_patch_json_schema_extra, # type: ignore[typeddict-item] ) diff --git a/packages/models-library/src/models_library/projects_nodes.py b/packages/models-library/src/models_library/projects_nodes.py index b074dd4dec6..3a6ea052313 100644 --- a/packages/models-library/src/models_library/projects_nodes.py +++ b/packages/models-library/src/models_library/projects_nodes.py @@ -113,6 +113,16 @@ class NodeState(BaseModel): ) +def _patch_json_schema_extra(schema: dict) -> None: + # NOTE: exporting without this trick does not make runHash as nullable. + # It is a Pydantic issue see https://github.com/samuelcolvin/pydantic/issues/1270 + for prop_name in ["parent", "runHash"]: + if prop_name in schema.get("properties", {}): + prop = deepcopy(schema["properties"][prop_name]) + prop["nullable"] = True + schema["properties"][prop_name] = prop + + class Node(BaseModel): key: ServiceKey = Field( ..., @@ -234,16 +244,7 @@ def convert_from_enum(cls, v): return NodeState(currentStatus=running_state_value) return v - def _patch_json_schema_extra(self, schema: dict) -> None: - # NOTE: exporting without this trick does not make runHash as nullable. - # It is a Pydantic issue see https://github.com/samuelcolvin/pydantic/issues/1270 - for prop_name in ["parent", "runHash"]: - if prop_name in schema.get("properties", {}): - prop = deepcopy(schema["properties"][prop_name]) - prop["nullable"] = True - schema["properties"][prop_name] = prop - model_config = ConfigDict( extra="forbid", - json_schema_extra=_patch_json_schema_extra, # type: ignore[typeddict-item] + json_schema_extra=_patch_json_schema_extra, ) diff --git a/packages/models-library/src/models_library/projects_nodes_io.py b/packages/models-library/src/models_library/projects_nodes_io.py index 876c2f71744..3a79b6acf00 100644 --- a/packages/models-library/src/models_library/projects_nodes_io.py +++ b/packages/models-library/src/models_library/projects_nodes_io.py @@ -6,7 +6,6 @@ - Link to another port: PortLink """ -import re from pathlib import Path from typing import Annotated, TypeAlias from uuid import UUID @@ -15,6 +14,7 @@ from pydantic import ( AnyUrl, BaseModel, + BeforeValidator, ConfigDict, Field, StringConstraints, @@ -40,8 +40,9 @@ LocationName = str -class SimcoreS3FileID(ConstrainedStr): - pattern: re.Pattern[str] | None = re.compile(SIMCORE_S3_FILE_ID_RE) +SimcoreS3FileID: TypeAlias = Annotated[ + str, StringConstraints(pattern=SIMCORE_S3_FILE_ID_RE) +] class SimcoreS3DirectoryID(ConstrainedStr): @@ -50,7 +51,7 @@ class SimcoreS3DirectoryID(ConstrainedStr): `{project_id}/{node_id}/simcore-dir-name/` """ - pattern: re.Pattern[str] | None = re.compile(SIMCORE_S3_DIRECTORY_ID_RE) + pattern: str = SIMCORE_S3_DIRECTORY_ID_RE @staticmethod def _get_parent(s3_object: str, *, parent_index: int) -> str: @@ -87,9 +88,7 @@ def from_simcore_s3_object(cls, s3_object: str) -> "SimcoreS3DirectoryID": return TypeAdapter(cls).validate_python(f"{parent_path}/") -class DatCoreFileID(ConstrainedStr): - regex: re.Pattern[str] | None = re.compile(DATCORE_FILE_ID_RE) - +DatCoreFileID: TypeAlias = Annotated[str, 
StringConstraints(pattern=DATCORE_FILE_ID_RE)] StorageFileID: TypeAlias = SimcoreS3FileID | DatCoreFileID @@ -123,7 +122,9 @@ class PortLink(BaseModel): class DownloadLink(BaseModel): """I/O port type to hold a generic download link to a file (e.g. S3 pre-signed link, etc)""" - download_link: Annotated[str, AnyUrl] = Field(..., alias="downloadLink") + download_link: Annotated[ + str, BeforeValidator(lambda x: str(TypeAdapter(AnyUrl).validate_python(x))) + ] = Field(..., alias="downloadLink") label: str | None = Field(default=None, description="Display name") model_config = ConfigDict( extra="forbid", @@ -145,11 +146,13 @@ class BaseFileLink(BaseModel): store: LocationID = Field( ..., description="The store identifier: 0 for simcore S3, 1 for datcore", + validate_default=True, ) path: StorageFileID = Field( ..., description="The path to the file in the storage provider domain", + union_mode="left_to_right", ) label: str | None = Field( @@ -170,6 +173,8 @@ def legacy_enforce_str_to_int(cls, v): return int(v) return v + model_config = ConfigDict(populate_by_name=True) + class SimCoreFileLink(BaseFileLink): """I/O port type to hold a link to a file in simcore S3 storage""" diff --git a/packages/models-library/src/models_library/rabbitmq_messages.py b/packages/models-library/src/models_library/rabbitmq_messages.py index 13ecda316ae..11e0ad55796 100644 --- a/packages/models-library/src/models_library/rabbitmq_messages.py +++ b/packages/models-library/src/models_library/rabbitmq_messages.py @@ -46,7 +46,7 @@ def routing_key(self) -> str | None: """ def body(self) -> bytes: - return self.json().encode() + return self.model_dump_json().encode() class ProjectMessageBase(BaseModel): diff --git a/packages/models-library/src/models_library/rest_pagination.py b/packages/models-library/src/models_library/rest_pagination.py index d8e3b9990b3..63b4e4948b0 100644 --- a/packages/models-library/src/models_library/rest_pagination.py +++ b/packages/models-library/src/models_library/rest_pagination.py @@ -1,8 +1,9 @@ from typing import Annotated, Final, Generic, TypeAlias, TypeVar +from common_library.pydantic_type_adapters import AnyHttpUrlLegacyAdapter from pydantic import ( - AnyHttpUrl, BaseModel, + BeforeValidator, ConfigDict, Field, NonNegativeInt, @@ -20,7 +21,9 @@ MAXIMUM_NUMBER_OF_ITEMS_PER_PAGE: Final[int] = 50 -PageLimitInt: TypeAlias = Annotated[int, Field(ge=1, lt=MAXIMUM_NUMBER_OF_ITEMS_PER_PAGE)] +PageLimitInt: TypeAlias = Annotated[ + int, Field(ge=1, lt=MAXIMUM_NUMBER_OF_ITEMS_PER_PAGE) +] DEFAULT_NUMBER_OF_ITEMS_PER_PAGE: Final[PageLimitInt] = TypeAdapter( PageLimitInt @@ -92,7 +95,14 @@ class PageRefs(BaseModel, Generic[RefT]): model_config = ConfigDict(extra="forbid") -class PageLinks(PageRefs[Annotated[str, AnyHttpUrl]]): +class PageLinks( + PageRefs[ + Annotated[ + str, + BeforeValidator(lambda x: str(AnyHttpUrlLegacyAdapter.validate_python(x))), + ] + ] +): ... 
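# NOTE (editor): aside, not part of this patch. In Pydantic v2, AnyUrl and
# AnyHttpUrl no longer subclass str, so fields that must stay plain strings
# while still being URL-validated (DownloadLink and PageLinks above) route the
# input through the URL type and cast back. A minimal sketch (model name
# invented, assuming pydantic>=2):

from typing import Annotated

from pydantic import AnyHttpUrl, BaseModel, BeforeValidator, TypeAdapter

HttpUrlStr = Annotated[
    str, BeforeValidator(lambda v: str(TypeAdapter(AnyHttpUrl).validate_python(v)))
]


class ExampleLink(BaseModel):
    href: HttpUrlStr


# still a str after validation (note: v2 normalizes, e.g. adds a trailing "/")
assert isinstance(ExampleLink(href="https://osparc.io").href, str)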
diff --git a/packages/models-library/src/models_library/rest_pagination_utils.py b/packages/models-library/src/models_library/rest_pagination_utils.py
index 8d901d50f30..1adfc5625c6 100644
--- a/packages/models-library/src/models_library/rest_pagination_utils.py
+++ b/packages/models-library/src/models_library/rest_pagination_utils.py
@@ -1,9 +1,8 @@
 from math import ceil
 from typing import Any, Protocol, TypedDict, Union, runtime_checkable

-from pydantic import parse_obj_as
+from common_library.pydantic_type_adapters import AnyHttpUrlLegacyAdapter

-from .basic_types import AnyHttpUrl
 from .rest_pagination import PageLinks, PageMetaInfoLimitOffset

 # NOTE: In this repo we use two types of URL-like data structures:
@@ -39,7 +38,9 @@ def _replace_query(url: _URLType, query: dict[str, Any]) -> str:
         new_url = url.update_query(query)
     else:
         new_url = url.replace_query_params(**query)
-    return f"{new_url}"
+
+    new_url_str = f"{new_url}"
+    return f"{AnyHttpUrlLegacyAdapter.validate_python(new_url_str)}"


 class PageDict(TypedDict):
@@ -61,7 +62,7 @@ def paginate_data(
     Usage:
         obj: PageDict = paginate_data( ... )
-        model = Page[MyModelItem].parse_obj(obj)
+        model = Page[MyModelItem].model_validate(obj)

     raises ValidationError
     """
@@ -72,37 +73,21 @@ def paginate_data(
             total=total, count=len(chunk), limit=limit, offset=offset
         ),
         _links=PageLinks(
-            self=(
-                parse_obj_as(
-                    AnyHttpUrl,
-                    _replace_query(request_url, {"offset": offset, "limit": limit}),
-                )
-            ),
-            first=parse_obj_as(
-                AnyHttpUrl, _replace_query(request_url, {"offset": 0, "limit": limit})
-            ),
-            prev=parse_obj_as(
-                AnyHttpUrl,
-                _replace_query(
-                    request_url, {"offset": max(offset - limit, 0), "limit": limit}
-                ),
+            self=_replace_query(request_url, {"offset": offset, "limit": limit}),
+            first=_replace_query(request_url, {"offset": 0, "limit": limit}),
+            prev=_replace_query(
+                request_url, {"offset": max(offset - limit, 0), "limit": limit}
             )
             if offset > 0
             else None,
-            next=parse_obj_as(
-                AnyHttpUrl,
-                _replace_query(
-                    request_url,
-                    {"offset": min(offset + limit, last_page * limit), "limit": limit},
-                ),
+            next=_replace_query(
+                request_url,
+                {"offset": min(offset + limit, last_page * limit), "limit": limit},
             )
             if offset < (last_page * limit)
             else None,
-            last=parse_obj_as(
-                AnyHttpUrl,
-                _replace_query(
-                    request_url, {"offset": last_page * limit, "limit": limit}
-                ),
+            last=_replace_query(
+                request_url, {"offset": last_page * limit, "limit": limit}
             ),
         ),
         data=chunk,
diff --git a/packages/models-library/src/models_library/rpc_pagination.py b/packages/models-library/src/models_library/rpc_pagination.py
index 0ec454cc9fd..92470b30d67 100644
--- a/packages/models-library/src/models_library/rpc_pagination.py
+++ b/packages/models-library/src/models_library/rpc_pagination.py
@@ -31,7 +31,7 @@ class PageRefsParams(PageRefs[PageQueryParameters]):
     @classmethod
     def create(cls, total: int, limit: int, offset: int) -> "PageRefsParams":
         last_page = ceil(total / limit) - 1
-        return cls.parse_obj(
+        return cls.model_validate(
             {
                 "self": {"offset": offset, "limit": limit},
                 "first": {"offset": 0, "limit": limit},
diff --git a/packages/models-library/src/models_library/service_settings_labels.py b/packages/models-library/src/models_library/service_settings_labels.py
index 8f954737231..851b1880cc3 100644
--- a/packages/models-library/src/models_library/service_settings_labels.py
+++ b/packages/models-library/src/models_library/service_settings_labels.py
@@ -12,11 +12,11 @@
     Field,
     Json,
     PrivateAttr,
+    TypeAdapter,
     ValidationError,
     ValidationInfo,
     field_validator,
     model_validator,
-    parse_obj_as,
 )

 from .callbacks_mapping import CallbacksMapping
@@ -43,8 +43,7 @@ class ContainerSpec(BaseModel):
         max_length=2,
     )

-    model_config = ConfigDict(
-        **_BaseConfig,
+    model_config = _BaseConfig | ConfigDict(
         json_schema_extra={
             "examples": [
                 {"Command": ["executable"]},
@@ -102,8 +101,7 @@ def ensure_backwards_compatible_setting_type(cls, v):
             return "Resources"
         return v

-    model_config = ConfigDict(
-        **_BaseConfig,
+    model_config = _BaseConfig | ConfigDict(
         populate_by_name=True,
         json_schema_extra={
             "examples": [
@@ -203,7 +201,7 @@ def validate_volume_limits(cls, v, info: ValidationInfo) -> str | None:
         for path_str, size_str in v.items():
             # checks that format is correct
             try:
-                parse_obj_as(ByteSize, size_str)
+                TypeAdapter(ByteSize).validate_python(size_str)
             except ValidationError as e:
                 msg = f"Provided size='{size_str}' contains invalid characters: {e!s}"
                 raise ValueError(msg) from e
@@ -221,8 +219,7 @@ def validate_volume_limits(cls, v, info: ValidationInfo) -> str | None:
             output: str | None = v
         return output

-    model_config = ConfigDict(
-        **_BaseConfig,
+    model_config = _BaseConfig | ConfigDict(
         json_schema_extra={
             "examples": [
                 {
diff --git a/packages/models-library/src/models_library/service_settings_nat_rule.py b/packages/models-library/src/models_library/service_settings_nat_rule.py
index ee937c81254..1f50b62f503 100644
--- a/packages/models-library/src/models_library/service_settings_nat_rule.py
+++ b/packages/models-library/src/models_library/service_settings_nat_rule.py
@@ -1,21 +1,14 @@
 from collections.abc import Generator
 from typing import Final

-from pydantic import (
-    BaseModel,
-    ConfigDict,
-    Field,
-    ValidationInfo,
-    field_validator,
-    parse_obj_as,
-)
+from pydantic import BaseModel, ConfigDict, Field, TypeAdapter, ValidationInfo, field_validator

 from .basic_types import PortInt
 from .osparc_variable_identifier import OsparcVariableIdentifier, raise_if_unresolved

 # Cloudflare DNS server address
 DEFAULT_DNS_SERVER_ADDRESS: Final[str] = "1.1.1.1"  # NOSONAR
-DEFAULT_DNS_SERVER_PORT: Final[PortInt] = parse_obj_as(PortInt, 53)
+DEFAULT_DNS_SERVER_PORT: Final[PortInt] = TypeAdapter(PortInt).validate_python(53)


 class _PortRange(BaseModel):
diff --git a/packages/models-library/src/models_library/services_io.py b/packages/models-library/src/models_library/services_io.py
index 49264f19799..6f1acd3d494 100644
--- a/packages/models-library/src/models_library/services_io.py
+++ b/packages/models-library/src/models_library/services_io.py
@@ -211,7 +211,7 @@ class ServiceInput(BaseServiceIOModel):
     def from_json_schema(cls, port_schema: dict[str, Any]) -> "ServiceInput":
         """Creates input port model from a json-schema"""
         data = cls._from_json_schema_base_implementation(port_schema)
-        return cls.parse_obj(data)
+        return cls.model_validate(data)


 class ServiceOutput(BaseServiceIOModel):
@@ -258,4 +258,4 @@ class ServiceOutput(BaseServiceIOModel):
     def from_json_schema(cls, port_schema: dict[str, Any]) -> "ServiceOutput":
         """Creates output port model from a json-schema"""
         data = cls._from_json_schema_base_implementation(port_schema)
-        return cls.parse_obj(data)
+        return cls.model_validate(data)
diff --git a/packages/models-library/src/models_library/utils/json_serialization.py b/packages/models-library/src/models_library/utils/json_serialization.py
index 69ffb00572d..a2fee1295f1 100644
--- a/packages/models-library/src/models_library/utils/json_serialization.py
+++ b/packages/models-library/src/models_library/utils/json_serialization.py
@@
-56,8 +56,8 @@ def decimal_encoder(dec_value: Decimal) -> int | float: """ if dec_value.as_tuple().exponent >= 0: # type: ignore[operator] return int(dec_value) - else: - return float(dec_value) + + return float(dec_value) ENCODERS_BY_TYPE: dict[type[Any], Callable[[Any], Any]] = { @@ -95,7 +95,8 @@ def pydantic_encoder(obj: Any) -> Any: if isinstance(obj, BaseModel): return obj.model_dump() - elif is_dataclass(obj): + + if is_dataclass(obj): return asdict(obj) # type: ignore[call-overload] # Check the class type and its superclasses for a matching encoder diff --git a/packages/models-library/src/models_library/utils/nodes.py b/packages/models-library/src/models_library/utils/nodes.py index 1def98ec507..dd791677d19 100644 --- a/packages/models-library/src/models_library/utils/nodes.py +++ b/packages/models-library/src/models_library/utils/nodes.py @@ -5,7 +5,7 @@ from copy import deepcopy from typing import Any -from pydantic import BaseModel +from pydantic import BaseModel, TypeAdapter from ..projects import Project from ..projects_nodes_io import NodeID, PortLink, UUIDStr @@ -20,7 +20,7 @@ def project_node_io_payload_cb( async def node_io_payload_cb(node_id: NodeID) -> dict[str, Any]: node_io_payload: dict[str, Any] = {"inputs": None, "outputs": None} - node = project.workbench.get(UUIDStr(node_id)) + node = project.workbench.get(TypeAdapter(UUIDStr).validate_python(node_id)) if node: node_io_payload = {"inputs": node.inputs, "outputs": node.outputs} @@ -58,7 +58,7 @@ async def compute_node_hash( # ensure we do not get pydantic types for hashing here, only jsoneable stuff if isinstance(payload, BaseModel): - payload = payload.dict(by_alias=True, exclude_unset=True) + payload = payload.model_dump(by_alias=True, exclude_unset=True) # remove the payload if it is null and it was resolved if payload is not None: diff --git a/packages/models-library/tests/test__models_examples.py b/packages/models-library/tests/test__models_examples.py index 12809db713b..2345b5451f1 100644 --- a/packages/models-library/tests/test__models_examples.py +++ b/packages/models-library/tests/test__models_examples.py @@ -14,6 +14,6 @@ def test_all_models_library_models_config_examples( model_cls: type[BaseModel], example_name: int, example_data: Any ): - assert model_cls.parse_obj( + assert model_cls.model_validate( example_data ), f"Failed {example_name} : {json.dumps(example_data)}" diff --git a/packages/models-library/tests/test__pydantic_models.py b/packages/models-library/tests/test__pydantic_models.py index 548d34f6569..645dc1ffe21 100644 --- a/packages/models-library/tests/test__pydantic_models.py +++ b/packages/models-library/tests/test__pydantic_models.py @@ -50,7 +50,7 @@ class ArgumentAnnotation(BaseModel): "items": {"type": "integer"}, } - assert x_annotation.dict() == { + assert x_annotation.model_dump() == { "name": "x", "data_schema": { "title": "schema[x]", @@ -64,7 +64,7 @@ class ArgumentAnnotation(BaseModel): # # the constructor would expect a raw string but we produced a nested dict with pytest.raises(ValidationError) as exc_info: - ArgumentAnnotation(**x_annotation.dict()) + ArgumentAnnotation(**x_annotation.model_dump()) assert exc_info.value.errors()[0] == { "input": {"items": {"type": "integer"}, "title": "schema[x]", "type": "array"}, @@ -147,7 +147,7 @@ class Func(BaseModel): assert model.input == {"w": 42, "z": False} assert model.output == "some/path/or/string" - # (undefined) json string vs SimCoreFileLink.dict() ------------ + # (undefined) json string vs SimCoreFileLink.model_dump() 
------------
    MINIMAL = 2  # <--- index of the example with the minimum required fields
    assert SimCoreFileLink in get_args(OutputTypes)
    example = SimCoreFileLink.model_validate(
diff --git a/packages/models-library/tests/test__pydantic_models_and_enums.py b/packages/models-library/tests/test__pydantic_models_and_enums.py
index 51f0226ee80..00c67c32c9b 100644
--- a/packages/models-library/tests/test__pydantic_models_and_enums.py
+++ b/packages/models-library/tests/test__pydantic_models_and_enums.py
@@ -2,7 +2,7 @@
 import pytest
 from models_library.utils.enums import are_equivalent_enums, enum_to_dict
-from pydantic import BaseModel, ValidationError, parse_obj_as
+from pydantic import BaseModel, TypeAdapter, ValidationError


 #
@@ -76,16 +76,16 @@ class Model(BaseModel):


 def test_parsing_enums_in_pydantic():
-    model = parse_obj_as(Model, {"color": Color1.RED})
+    model = TypeAdapter(Model).validate_python({"color": Color1.RED})
     assert model.color == Color1.RED

     # Can parse from STRING
-    model = parse_obj_as(Model, {"color": "RED"})
+    model = TypeAdapter(Model).validate_python({"color": "RED"})
     assert model.color == Color1.RED

     # Can **NOT** parse from equivalent enum
     with pytest.raises(ValidationError):
-        parse_obj_as(Model, {"color": Color2.RED})
+        TypeAdapter(Model).validate_python({"color": Color2.RED})


 class ModelStrAndEnum(BaseModel):
@@ -95,30 +95,32 @@ class ModelStrAndEnum(BaseModel):

 def test_parsing_strenum_in_pydantic():
     assert are_equivalent_enums(Color1, ColorStrAndEnum1)

-    model = parse_obj_as(ModelStrAndEnum, {"color": ColorStrAndEnum1.RED})
+    model = TypeAdapter(ModelStrAndEnum).validate_python(
+        {"color": ColorStrAndEnum1.RED}
+    )
     assert model.color == ColorStrAndEnum1.RED

     # Can parse from string
-    model = parse_obj_as(ModelStrAndEnum, {"color": "RED"})
+    model = TypeAdapter(ModelStrAndEnum).validate_python({"color": "RED"})
     assert model.color == ColorStrAndEnum1.RED

     # **CAN** parse other equivalent str-enum
     # Using str-enums allows you to parse from equivalent enums!
-    parse_obj_as(ModelStrAndEnum, {"color": ColorStrAndEnum2.RED})
+    TypeAdapter(ModelStrAndEnum).validate_python({"color": ColorStrAndEnum2.RED})


 def test_parsing_str_and_enum_in_pydantic():
-    # Can still NOT parse equilalent enum(-only)
+    # Can still NOT parse equivalent enum(-only)
     # with pytest.raises(ValidationError):
-    #    parse_obj_as(ModelStrAndEnum, {"color": Color1.RED})
+    #    TypeAdapter(ModelStrAndEnum).validate_python({"color": Color1.RED})

     # And the opposite? NO!!!
     with pytest.raises(ValidationError):
-        parse_obj_as(Color1, {"color": ColorStrAndEnum1.RED})
+        TypeAdapter(Color1).validate_python({"color": ColorStrAndEnum1.RED})

     with pytest.raises(ValidationError):
-        parse_obj_as(Color1, {"color": ColorStrAndEnum2.RED})
+        TypeAdapter(Color1).validate_python({"color": ColorStrAndEnum2.RED})

     # CONCLUSION: we need a validator to pre-process inputs !
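# NOTE (editor): aside, not part of this patch. One possible shape for such a
# pre-processing validator (sketch only: the enums and model below are invented;
# the repo's real helpers live in common_validators, referenced just below),
# assuming pydantic>=2:

from enum import Enum

from pydantic import BaseModel, field_validator


class ExampleColor(Enum):
    RED = "RED"


class OtherColor(Enum):  # same member names, but a different class
    RED = "RED"


class ExampleModel(BaseModel):
    color: ExampleColor

    @field_validator("color", mode="before")
    @classmethod
    def _coerce_equivalent_enum(cls, v):
        # look up by member *name*, so any enum with the same member names is
        # accepted even though it is a different class
        return ExampleColor[v.name] if isinstance(v, Enum) else v


assert ExampleModel(color=OtherColor.RED).color is ExampleColor.RED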
# SEE models_library.utils.common_validators diff --git a/packages/models-library/tests/test_api_schemas_catalog.py b/packages/models-library/tests/test_api_schemas_catalog.py index 0c815d7bd0c..721f27481e2 100644 --- a/packages/models-library/tests/test_api_schemas_catalog.py +++ b/packages/models-library/tests/test_api_schemas_catalog.py @@ -9,7 +9,7 @@ def test_service_port_with_file(): - io = ServiceInput.parse_obj( + io = ServiceInput.model_validate( { "displayOrder": 1, "label": "Input files", @@ -21,7 +21,7 @@ def test_service_port_with_file(): } ) - port = ServicePortGet.from_service_io("input", "input_1", io).dict( + port = ServicePortGet.from_service_io("input", "input_1", io).model_dump( exclude_unset=True ) @@ -39,7 +39,7 @@ def test_service_port_with_file(): def test_service_port_with_boolean(): - io = ServiceInput.parse_obj( + io = ServiceInput.model_validate( { "displayOrder": 3, "label": "Same title and description is more usual than you might think", @@ -49,7 +49,7 @@ def test_service_port_with_boolean(): } ) - port = ServicePortGet.from_service_io("input", "input_1", io).dict( + port = ServicePortGet.from_service_io("input", "input_1", io).model_dump( exclude_unset=True ) diff --git a/packages/models-library/tests/test_api_schemas_webserver_projects.py b/packages/models-library/tests/test_api_schemas_webserver_projects.py index b8e4fcbdc47..295e9ee2304 100644 --- a/packages/models-library/tests/test_api_schemas_webserver_projects.py +++ b/packages/models-library/tests/test_api_schemas_webserver_projects.py @@ -14,7 +14,7 @@ ) from models_library.generics import Envelope from models_library.rest_pagination import Page -from pydantic import parse_obj_as +from pydantic import TypeAdapter from pytest_simcore.simcore_webserver_projects_rest_api import ( CREATE_FROM_SERVICE, CREATE_FROM_TEMPLATE, @@ -34,12 +34,12 @@ ids=lambda c: c.name, ) def test_create_project_schemas(api_call: HttpApiCallCapture): - request_payload = ProjectCreateNew.parse_obj(api_call.request_payload) + request_payload = ProjectCreateNew.model_validate(api_call.request_payload) assert request_payload - response_body = parse_obj_as( - Envelope[ProjectGet] | Envelope[TaskProjectGet], api_call.response_body - ) + response_body = TypeAdapter( + Envelope[ProjectGet] | Envelope[TaskProjectGet] + ).validate_python(api_call.response_body) assert response_body @@ -51,7 +51,9 @@ def test_create_project_schemas(api_call: HttpApiCallCapture): def test_list_project_schemas(api_call: HttpApiCallCapture): assert api_call.request_payload is None - response_body = parse_obj_as(Page[ProjectListItem], api_call.response_body) + response_body = TypeAdapter(Page[ProjectListItem]).validate_python( + api_call.response_body + ) assert response_body @@ -64,7 +66,9 @@ def test_get_project_schemas(api_call: HttpApiCallCapture): # NOTE: that response_body here is the exported values # and therefore ProjectGet has to be implemented in such a way that # can also parse exported values! (e.g. 
Json does not allow that, or occasionally exclude_none)
-    response_body = parse_obj_as(Envelope[ProjectGet], api_call.response_body)
+    response_body = TypeAdapter(Envelope[ProjectGet]).validate_python(
+        api_call.response_body
+    )
     assert response_body


@@ -74,8 +78,12 @@
     ids=lambda c: c.name,
 )
 def test_replace_project_schemas(api_call: HttpApiCallCapture):
-    request_payload = parse_obj_as(ProjectReplace, api_call.request_payload)
+    request_payload = TypeAdapter(ProjectReplace).validate_python(
+        api_call.request_payload
+    )
     assert request_payload

-    response_body = parse_obj_as(Envelope[ProjectGet], api_call.response_body)
+    response_body = TypeAdapter(Envelope[ProjectGet]).validate_python(
+        api_call.response_body
+    )
     assert response_body
diff --git a/packages/models-library/tests/test_callbacks_mapping.py b/packages/models-library/tests/test_callbacks_mapping.py
index e1c0df003c6..e39db6367ad 100644
--- a/packages/models-library/tests/test_callbacks_mapping.py
+++ b/packages/models-library/tests/test_callbacks_mapping.py
@@ -6,7 +6,7 @@
     TIMEOUT_MIN,
     CallbacksMapping,
 )
-from pydantic import ValidationError, parse_obj_as
+from pydantic import TypeAdapter, ValidationError


 def _format_with_timeout(timeout: float) -> dict[str, Any]:
@@ -20,8 +20,10 @@ def test_inactivity_time_out_is_max_capped():
         INACTIVITY_TIMEOUT_CAP - 1,
         INACTIVITY_TIMEOUT_CAP,
     ]:
-        parse_obj_as(CallbacksMapping, _format_with_timeout(in_bounds))
+        TypeAdapter(CallbacksMapping).validate_python(_format_with_timeout(in_bounds))

     for out_of_bounds in [INACTIVITY_TIMEOUT_CAP + 1, TIMEOUT_MIN - 1]:
         with pytest.raises(ValidationError):
-            parse_obj_as(CallbacksMapping, _format_with_timeout(out_of_bounds))
+            TypeAdapter(CallbacksMapping).validate_python(
+                _format_with_timeout(out_of_bounds)
+            )
diff --git a/packages/models-library/tests/test_docker.py b/packages/models-library/tests/test_docker.py
index 55cb9419bbc..dd5fed89951 100644
--- a/packages/models-library/tests/test_docker.py
+++ b/packages/models-library/tests/test_docker.py
@@ -13,7 +13,7 @@
     DockerLabelKey,
     StandardSimcoreDockerLabels,
 )
-from pydantic import ValidationError, parse_obj_as
+from pydantic import TypeAdapter, ValidationError

 _faker = Faker()

@@ -40,11 +40,11 @@ def test_docker_label_key(label_key: str, valid: bool):
     # NOTE: https://docs.docker.com/config/labels-custom-metadata/#key-format-recommendations
     if valid:
-        instance = parse_obj_as(DockerLabelKey, label_key)
+        instance = TypeAdapter(DockerLabelKey).validate_python(label_key)
         assert instance
     else:
         with pytest.raises(ValidationError):
-            parse_obj_as(DockerLabelKey, label_key)
+            TypeAdapter(DockerLabelKey).validate_python(label_key)


 @pytest.mark.parametrize(
@@ -94,11 +94,11 @@ def test_docker_label_key(label_key: str, valid: bool):
 )
 def test_docker_generic_tag(image_name: str, valid: bool):
     if valid:
-        instance = parse_obj_as(DockerGenericTag, image_name)
+        instance = TypeAdapter(DockerGenericTag).validate_python(image_name)
         assert instance
     else:
         with pytest.raises(ValidationError):
-            parse_obj_as(DockerGenericTag, image_name)
+            TypeAdapter(DockerGenericTag).validate_python(image_name)


 @pytest.mark.parametrize(
@@ -107,7 +107,9 @@ def test_docker_generic_tag(image_name: str, valid: bool):
     ids=str,
 )
 def test_simcore_service_docker_label_keys(obj_data: dict[str, Any]):
-    simcore_service_docker_label_keys = StandardSimcoreDockerLabels.parse_obj(obj_data)
+    simcore_service_docker_label_keys = StandardSimcoreDockerLabels.model_validate(
+
obj_data + ) exported_dict = simcore_service_docker_label_keys.to_simcore_runtime_docker_labels() assert all( isinstance(v, str) for v in exported_dict.values() @@ -115,8 +117,8 @@ def test_simcore_service_docker_label_keys(obj_data: dict[str, Any]): assert all( key.startswith(_SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX) for key in exported_dict ) - re_imported_docker_label_keys = parse_obj_as( - StandardSimcoreDockerLabels, exported_dict - ) + re_imported_docker_label_keys = TypeAdapter( + StandardSimcoreDockerLabels + ).validate_python(exported_dict) assert re_imported_docker_label_keys assert simcore_service_docker_label_keys == re_imported_docker_label_keys diff --git a/packages/models-library/tests/test_generics.py b/packages/models-library/tests/test_generics.py index b778cd4a490..f94436f1214 100644 --- a/packages/models-library/tests/test_generics.py +++ b/packages/models-library/tests/test_generics.py @@ -20,7 +20,7 @@ def test_dict_base_model(): "another key": "a string value", "yet another key": Path("some_path"), } - some_instance = DictModel[str, Any].parse_obj(some_dict) + some_instance = DictModel[str, Any].model_validate(some_dict) assert some_instance # test some typical dict methods @@ -78,10 +78,10 @@ def test_enveloped_data_builtin(builtin_type: type, builtin_value: Any): assert envelope == Envelope[builtin_type].from_data(builtin_value) # exports - assert envelope.dict(exclude_unset=True, exclude_none=True) == { + assert envelope.model_dump(exclude_unset=True, exclude_none=True) == { "data": builtin_value } - assert envelope.dict() == {"data": builtin_value, "error": None} + assert envelope.model_dump() == {"data": builtin_value, "error": None} def test_enveloped_data_model(): @@ -92,7 +92,9 @@ class User(BaseModel): enveloped = Envelope[User](data={"idr": 3}) assert isinstance(enveloped.data, User) - assert enveloped.dict(exclude_unset=True, exclude_none=True) == {"data": {"idr": 3}} + assert enveloped.model_dump(exclude_unset=True, exclude_none=True) == { + "data": {"idr": 3} + } def test_enveloped_data_dict(): diff --git a/packages/models-library/tests/test_osparc_variable_identifier.py b/packages/models-library/tests/test_osparc_variable_identifier.py index 18b48c299bd..cb23b19f60a 100644 --- a/packages/models-library/tests/test_osparc_variable_identifier.py +++ b/packages/models-library/tests/test_osparc_variable_identifier.py @@ -10,7 +10,7 @@ raise_if_unresolved_osparc_variable_identifier_found, replace_osparc_variable_identifier, ) -from pydantic import BaseModel, ValidationError, parse_obj_as +from pydantic import BaseModel, TypeAdapter, ValidationError VALID_IDENTIFIERS: list[str] = [ "$OSPARC_VARIABLE_One121_", @@ -41,6 +41,11 @@ ] +_OSPARC_VARIABLE_IDENTIFIER_ADAPTER: TypeAdapter[ + OsparcVariableIdentifier +] = TypeAdapter(OsparcVariableIdentifier) + + @pytest.fixture(params=VALID_IDENTIFIERS) def osparc_variable_identifier_str(request: pytest.FixtureRequest) -> str: return request.param @@ -50,13 +55,15 @@ def osparc_variable_identifier_str(request: pytest.FixtureRequest) -> str: def identifier( osparc_variable_identifier_str: str, ) -> OsparcVariableIdentifier: - return parse_obj_as(OsparcVariableIdentifier, osparc_variable_identifier_str) + return _OSPARC_VARIABLE_IDENTIFIER_ADAPTER.validate_python( + osparc_variable_identifier_str + ) @pytest.mark.parametrize("invalid_var_name", INVALID_IDENTIFIERS) def test_osparc_variable_identifier_does_not_validate(invalid_var_name: str): with pytest.raises(ValidationError): - parse_obj_as(OsparcVariableIdentifier, 
invalid_var_name) + _OSPARC_VARIABLE_IDENTIFIER_ADAPTER.validate_python(invalid_var_name) def test_raise_if_unresolved(identifier: OsparcVariableIdentifier): @@ -76,13 +83,19 @@ class Example(BaseModel): @pytest.mark.parametrize( "object_template", [ - parse_obj_as(OsparcVariableIdentifier, "$OSPARC_VARIABLE_1"), - [parse_obj_as(OsparcVariableIdentifier, "$OSPARC_VARIABLE_1")], - (parse_obj_as(OsparcVariableIdentifier, "$OSPARC_VARIABLE_1"),), - {parse_obj_as(OsparcVariableIdentifier, "$OSPARC_VARIABLE_1")}, - {"test": parse_obj_as(OsparcVariableIdentifier, "$OSPARC_VARIABLE_1")}, + _OSPARC_VARIABLE_IDENTIFIER_ADAPTER.validate_python("$OSPARC_VARIABLE_1"), + [_OSPARC_VARIABLE_IDENTIFIER_ADAPTER.validate_python("$OSPARC_VARIABLE_1")], + (_OSPARC_VARIABLE_IDENTIFIER_ADAPTER.validate_python("$OSPARC_VARIABLE_1"),), + {_OSPARC_VARIABLE_IDENTIFIER_ADAPTER.validate_python("$OSPARC_VARIABLE_1")}, + { + "test": _OSPARC_VARIABLE_IDENTIFIER_ADAPTER.validate_python( + "$OSPARC_VARIABLE_1" + ) + }, Example( - nested_objects=parse_obj_as(OsparcVariableIdentifier, "$OSPARC_VARIABLE_1") + nested_objects=_OSPARC_VARIABLE_IDENTIFIER_ADAPTER.validate_python( + "$OSPARC_VARIABLE_1" + ) ), ], ) @@ -147,6 +160,8 @@ def test_osparc_variable_name_and_default_value( expected_osparc_variable_name: str, expected_default_value: str | None, ): - osparc_variable_identifer = parse_obj_as(OsparcVariableIdentifier, str_identifier) + osparc_variable_identifer = _OSPARC_VARIABLE_IDENTIFIER_ADAPTER.validate_python( + str_identifier + ) assert osparc_variable_identifer.name == expected_osparc_variable_name assert osparc_variable_identifer.default_value == expected_default_value diff --git a/packages/models-library/tests/test_project_networks.py b/packages/models-library/tests/test_project_networks.py index c91f0503a8e..a929ac2a0aa 100644 --- a/packages/models-library/tests/test_project_networks.py +++ b/packages/models-library/tests/test_project_networks.py @@ -7,7 +7,7 @@ DockerNetworkName, NetworksWithAliases, ) -from pydantic import ValidationError, parse_obj_as +from pydantic import TypeAdapter, ValidationError @pytest.mark.parametrize( @@ -19,7 +19,7 @@ ], ) def test_networks_with_aliases_ok(valid_example: dict) -> None: - assert NetworksWithAliases.parse_obj(valid_example) + assert NetworksWithAliases.model_validate(valid_example) @pytest.mark.parametrize( @@ -39,26 +39,26 @@ def test_networks_with_aliases_ok(valid_example: dict) -> None: ) def test_networks_with_aliases_fail(invalid_example: dict) -> None: with pytest.raises(ValidationError): - assert NetworksWithAliases.parse_obj(invalid_example) + assert NetworksWithAliases.model_validate(invalid_example) @pytest.mark.parametrize("network_name", ["a", "ok", "a_", "A_", "a1", "a-"]) def test_projects_networks_validation(network_name: str) -> None: - assert parse_obj_as(DockerNetworkName, network_name) == network_name - assert parse_obj_as(DockerNetworkAlias, network_name) == network_name + assert TypeAdapter(DockerNetworkName).validate_python(network_name) == network_name + assert TypeAdapter(DockerNetworkAlias).validate_python(network_name) == network_name @pytest.mark.parametrize("network_name", ["", "1", "-", "_"]) def test_projects_networks_validation_fails(network_name: str) -> None: with pytest.raises(ValidationError): - parse_obj_as(DockerNetworkName, network_name) + TypeAdapter(DockerNetworkName).validate_python(network_name) with pytest.raises(ValidationError): - parse_obj_as(DockerNetworkAlias, network_name) + 
TypeAdapter(DockerNetworkAlias).validate_python(network_name) def test_class_constructors_fail() -> None: with pytest.raises(ValidationError): - NetworksWithAliases.parse_obj( + NetworksWithAliases.model_validate( { "ok-netowrk_naeme": { UUID( diff --git a/packages/models-library/tests/test_project_nodes.py b/packages/models-library/tests/test_project_nodes.py index 2edefd1533d..96f427a19cb 100644 --- a/packages/models-library/tests/test_project_nodes.py +++ b/packages/models-library/tests/test_project_nodes.py @@ -31,7 +31,7 @@ def test_create_minimal_node(minimal_node_data_sample: dict[str, Any]): assert node.parent is None assert node.progress is None - assert node.dict(exclude_unset=True) == minimal_node_data_sample + assert node.model_dump(exclude_unset=True) == minimal_node_data_sample def test_create_minimal_node_with_new_data_type( @@ -69,4 +69,4 @@ def test_backwards_compatibility_node_data(minimal_node_data_sample: dict[str, A assert node.state.modified is True assert node.state.dependencies == set() - assert node.dict(exclude_unset=True) != old_node_data + assert node.model_dump(exclude_unset=True) != old_node_data diff --git a/packages/models-library/tests/test_project_nodes_io.py b/packages/models-library/tests/test_project_nodes_io.py index aac9568eccb..9a191c7d674 100644 --- a/packages/models-library/tests/test_project_nodes_io.py +++ b/packages/models-library/tests/test_project_nodes_io.py @@ -12,7 +12,7 @@ SimCoreFileLink, SimcoreS3DirectoryID, ) -from pydantic import ValidationError, parse_obj_as +from pydantic import TypeAdapter, ValidationError @pytest.fixture() @@ -120,11 +120,13 @@ def test_store_discriminator(): def test_simcore_s3_directory_id(): # the only allowed path is the following - result = parse_obj_as(SimcoreS3DirectoryID, f"{UUID_0}/{UUID_0}/ok-simcore-dir/") + result = TypeAdapter(SimcoreS3DirectoryID).validate_python( + f"{UUID_0}/{UUID_0}/ok-simcore-dir/" + ) assert result == f"{UUID_0}/{UUID_0}/ok-simcore-dir/" # re-parsing must work the same thing works - assert parse_obj_as(SimcoreS3DirectoryID, result) + assert TypeAdapter(SimcoreS3DirectoryID).validate_python(result) # all below are not allowed for invalid_path in ( @@ -132,10 +134,12 @@ def test_simcore_s3_directory_id(): f"{UUID_0}/{UUID_0}/a-dir/a-file", ): with pytest.raises(ValidationError): - parse_obj_as(SimcoreS3DirectoryID, invalid_path) + TypeAdapter(SimcoreS3DirectoryID).validate_python(invalid_path) with pytest.raises(ValidationError, match="Not allowed subdirectory found in"): - parse_obj_as(SimcoreS3DirectoryID, f"{UUID_0}/{UUID_0}/a-dir/a-subdir/") + TypeAdapter(SimcoreS3DirectoryID).validate_python( + f"{UUID_0}/{UUID_0}/a-dir/a-subdir/" + ) @pytest.mark.parametrize( diff --git a/packages/models-library/tests/test_projects.py b/packages/models-library/tests/test_projects.py index 8b646345c2d..5cbb0e13573 100644 --- a/packages/models-library/tests/test_projects.py +++ b/packages/models-library/tests/test_projects.py @@ -28,7 +28,7 @@ def minimal_project(faker: Faker) -> dict[str, Any]: def test_project_minimal_model(minimal_project: dict[str, Any]): - project = Project.parse_obj(minimal_project) + project = Project.model_validate(minimal_project) assert project assert project.thumbnail is None @@ -37,7 +37,7 @@ def test_project_minimal_model(minimal_project: dict[str, Any]): def test_project_with_thumbnail_as_empty_string(minimal_project: dict[str, Any]): thumbnail_empty_string = deepcopy(minimal_project) thumbnail_empty_string.update({"thumbnail": ""}) - project = 
Project.parse_obj(thumbnail_empty_string) + project = Project.model_validate(thumbnail_empty_string) assert project assert project.thumbnail is None diff --git a/packages/models-library/tests/test_projects_state.py b/packages/models-library/tests/test_projects_state.py index 2895d71f3a1..236d65a5538 100644 --- a/packages/models-library/tests/test_projects_state.py +++ b/packages/models-library/tests/test_projects_state.py @@ -5,7 +5,7 @@ def test_project_locked_with_missing_owner_raises(): with pytest.raises(ValueError): ProjectLocked(value=True, status=ProjectStatus.OPENED) - ProjectLocked.parse_obj({"value": False, "status": ProjectStatus.OPENED}) + ProjectLocked.model_validate({"value": False, "status": ProjectStatus.OPENED}) @pytest.mark.parametrize( @@ -19,4 +19,4 @@ def test_project_locked_with_missing_owner_raises(): ) def test_project_locked_with_allowed_values(lock: bool, status: ProjectStatus): with pytest.raises(ValueError): - ProjectLocked.parse_obj({"value": lock, "status": status}) + ProjectLocked.model_validate({"value": lock, "status": status}) diff --git a/packages/models-library/tests/test_rest_pagination_utils.py b/packages/models-library/tests/test_rest_pagination_utils.py index f9887a1bf71..acaf6bc9d5c 100644 --- a/packages/models-library/tests/test_rest_pagination_utils.py +++ b/packages/models-library/tests/test_rest_pagination_utils.py @@ -41,7 +41,7 @@ def test_paginating_data(base_url): ) assert data_obj - model_instance = Page[int].parse_obj(data_obj) + model_instance = Page[int].model_validate(data_obj) assert model_instance assert model_instance.meta == PageMetaInfoLimitOffset( total=total_number_of_items, count=len(data_chunk), limit=limit, offset=offset @@ -75,7 +75,7 @@ def test_paginating_data(base_url): offset += len(data_chunk) assert model_instance.links.next is not None - data_obj: PageDict = paginate_data( + data_obj: PageDict = paginate_data( # type: ignore[no-redef] data_chunk, request_url=URL(model_instance.links.next), total=total_number_of_items, @@ -83,7 +83,7 @@ def test_paginating_data(base_url): offset=offset, ) - model_instance = Page[int].parse_obj(data_obj) + model_instance = Page[int].model_validate(data_obj) assert model_instance assert model_instance.meta == PageMetaInfoLimitOffset( total=total_number_of_items, @@ -127,7 +127,7 @@ def test_paginating_data(base_url): assert offset == last_chunk_offset assert model_instance.links.next is not None - data_obj: PageDict = paginate_data( + data_obj: PageDict = paginate_data( # type: ignore[no-redef] data_chunk, request_url=URL(model_instance.links.next), total=total_number_of_items, @@ -136,7 +136,7 @@ def test_paginating_data(base_url): ) assert data_obj - model_instance = Page[int].parse_obj(data_obj) + model_instance = Page[int].model_validate(data_obj) assert model_instance assert model_instance.meta == PageMetaInfoLimitOffset( diff --git a/packages/models-library/tests/test_service_settings_labels.py b/packages/models-library/tests/test_service_settings_labels.py index f9c096804cc..287e3d5614b 100644 --- a/packages/models-library/tests/test_service_settings_labels.py +++ b/packages/models-library/tests/test_service_settings_labels.py @@ -31,7 +31,7 @@ ) from models_library.services_resources import DEFAULT_SINGLE_SERVICE_NAME from models_library.utils.string_substitution import TextTemplate -from pydantic import BaseModel, TypeAdapter, ValidationError, parse_obj_as +from pydantic import BaseModel, TypeAdapter, ValidationError from pydantic.json import pydantic_encoder @@ -69,12 +69,12 @@ 
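
Parametrized generic models such as `Page[int]` validate the same way in v2. A stripped-down sketch (the repo's `Page` carries meta and links fields omitted here):

```python
from typing import Generic, TypeVar

from pydantic import BaseModel

T = TypeVar("T")


class Page(BaseModel, Generic[T]):
    total: int
    data: list[T]


page = Page[int].model_validate({"total": 2, "data": [1, 2]})
assert page.data == [1, 2]
```

The `# type: ignore[no-redef]` markers in the pagination hunk exist because the test re-annotates `data_obj: PageDict` a second time, which mypy reports as a redefinition.
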
def test_simcore_service_labels(example: dict, items: int, uses_dynamic_sidecar: simcore_service_labels = SimcoreServiceLabels.model_validate(example) assert simcore_service_labels - assert len(simcore_service_labels.dict(exclude_unset=True)) == items + assert len(simcore_service_labels.model_dump(exclude_unset=True)) == items assert simcore_service_labels.needs_dynamic_sidecar == uses_dynamic_sidecar def test_service_settings(): - simcore_settings_settings_label = SimcoreServiceSettingsLabel.parse_obj( + simcore_settings_settings_label = SimcoreServiceSettingsLabel.model_validate( SimcoreServiceSettingLabelEntry.model_config["json_schema_extra"]["examples"] ) assert simcore_settings_settings_label @@ -126,7 +126,7 @@ def test_path_mappings_json_encoding(): path_mappings = PathMappingsLabel.model_validate(example) print(path_mappings) assert ( - PathMappingsLabel.parse_raw(path_mappings.model_dump_json()) + PathMappingsLabel.model_validate_json(path_mappings.model_dump_json()) == path_mappings ) @@ -262,7 +262,7 @@ def test_container_outgoing_permit_list_and_container_allow_internet_with_compos "simcore.service.container-http-entrypoint": container_name_1, } - instance = DynamicSidecarServiceLabels.parse_raw(json.dumps(dict_data)) + instance = DynamicSidecarServiceLabels.model_validate_json(json.dumps(dict_data)) assert ( instance.containers_allowed_outgoing_permit_list[container_name_1][0] == expected_host_permit_list_policy @@ -291,7 +291,7 @@ def test_container_outgoing_permit_list_and_container_allow_internet_without_com ) }, ): - assert DynamicSidecarServiceLabels.parse_raw(json.dumps(dict_data)) + assert TypeAdapter(DynamicSidecarServiceLabels).validate_json(json.dumps(dict_data)) def test_container_allow_internet_no_compose_spec_not_ok(): @@ -299,7 +299,7 @@ def test_container_allow_internet_no_compose_spec_not_ok(): "simcore.service.containers-allowed-outgoing-internet": json.dumps(["hoho"]), } with pytest.raises(ValidationError) as exec_info: - assert DynamicSidecarServiceLabels.parse_raw(json.dumps(dict_data)) + assert DynamicSidecarServiceLabels.model_validate_json(json.dumps(dict_data)) assert "Expected only 1 entry 'container' not '{'hoho'}" in f"{exec_info.value}" @@ -312,7 +312,7 @@ def test_container_allow_internet_compose_spec_not_ok(): "simcore.service.containers-allowed-outgoing-internet": json.dumps(["hoho"]), } with pytest.raises(ValidationError) as exec_info: - assert DynamicSidecarServiceLabels.parse_raw(json.dumps(dict_data)) + assert DynamicSidecarServiceLabels.model_validate_json(json.dumps(dict_data)) assert f"container='hoho' not found in {compose_spec=}" in f"{exec_info.value}" @@ -331,7 +331,7 @@ def test_container_outgoing_permit_list_no_compose_spec_not_ok(): ), } with pytest.raises(ValidationError) as exec_info: - assert DynamicSidecarServiceLabels.parse_raw(json.dumps(dict_data)) + assert DynamicSidecarServiceLabels.model_validate_json(json.dumps(dict_data)) assert ( f"Expected only one entry '{DEFAULT_SINGLE_SERVICE_NAME}' not 'container_name'" in f"{exec_info.value}" @@ -355,7 +355,7 @@ def test_container_outgoing_permit_list_compose_spec_not_ok(): "simcore.service.compose-spec": json.dumps(compose_spec), } with pytest.raises(ValidationError) as exec_info: - assert DynamicSidecarServiceLabels.parse_raw(json.dumps(dict_data)) + assert DynamicSidecarServiceLabels.model_validate_json(json.dumps(dict_data)) assert ( f"Trying to permit list container='container_name' which was not found in {compose_spec=}" in f"{exec_info.value}" @@ -378,7 +378,7 @@ def 
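
`parse_raw` becomes `model_validate_json`, which parses and validates in one step; note that v2 also stops silently coercing JSON numbers into `str` fields. Sketch with an illustrative label model:

```python
import json

from pydantic import BaseModel, ValidationError


class Labels(BaseModel):
    compose_spec: str | None = None


raw = json.dumps({"compose_spec": None})
labels = Labels.model_validate_json(raw)  # v1: Labels.parse_raw(raw)
assert labels.compose_spec is None

try:
    Labels.model_validate_json('{"compose_spec": 123}')  # int is NOT coerced to str in v2
except ValidationError as err:
    print(err)
```
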
test_not_allowed_in_both_permit_list_and_outgoing_internet(): } with pytest.raises(ValidationError) as exec_info: - DynamicSidecarServiceLabels.parse_raw(json.dumps(dict_data)) + DynamicSidecarServiceLabels.model_validate_json(json.dumps(dict_data)) assert ( f"Not allowed common_containers={{'{container_name}'}} detected" @@ -520,30 +520,27 @@ def test_can_parse_labels_with_osparc_identifiers( vendor_environments: dict[str, Any], service_labels: dict[str, str] ): # can load OSPARC_VARIABLE_ identifiers!! - service_meta = SimcoreServiceLabels.parse_obj(service_labels) + service_meta = SimcoreServiceLabels.model_validate(service_labels) assert service_meta.containers_allowed_outgoing_permit_list nat_rule: NATRule = service_meta.containers_allowed_outgoing_permit_list[ "s4l-core" ][0] - assert nat_rule.hostname == parse_obj_as( - OsparcVariableIdentifier, + assert nat_rule.hostname == TypeAdapter(OsparcVariableIdentifier).validate_python( "${OSPARC_VARIABLE_VENDOR_SECRET_LICENSE_SERVER_HOSTNAME}", ) assert nat_rule.tcp_ports == [ - parse_obj_as( - OsparcVariableIdentifier, + TypeAdapter(OsparcVariableIdentifier).validate_python( "$OSPARC_VARIABLE_VENDOR_SECRET_TCP_PORTS_1", ), - parse_obj_as( - OsparcVariableIdentifier, + TypeAdapter(OsparcVariableIdentifier).validate_python( "$OSPARC_VARIABLE_VENDOR_SECRET_TCP_PORTS_2", ), 3, ] service_meta = replace_osparc_variable_identifier(service_meta, vendor_environments) - service_meta_str = service_meta.json() + service_meta_str = service_meta.model_dump_json() not_replaced_vars = {"OSPARC_VARIABLE_OS_TYPE_LINUX"} @@ -552,7 +549,7 @@ def test_can_parse_labels_with_osparc_identifiers( continue assert osparc_variable_name not in service_meta_str - service_meta_str = service_meta.json( + service_meta_str = service_meta.model_dump_json( include={"containers_allowed_outgoing_permit_list"} ) @@ -568,7 +565,7 @@ def test_resolving_some_service_labels_at_load_time( vendor_environments: dict[str, Any], service_labels: dict[str, str] ): print(json.dumps(service_labels, indent=1)) - service_meta = SimcoreServiceLabels.parse_obj(service_labels) + service_meta = SimcoreServiceLabels.model_validate(service_labels) # NOTE: replacing all OsparcVariableIdentifier instances nested inside objects # this also does a partial replacement if there is no entry inside the vendor_environments @@ -593,7 +590,7 @@ def test_resolving_some_service_labels_at_load_time( # NOTE: that this model needs all values to be resolved before parsing them # otherwise it might fail!! 
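
`.json(include=...)` becomes `model_dump_json(include=...)`; the `include` filter takes a set of field names and the output is compact JSON. Illustrative sketch:

```python
from pydantic import BaseModel


class ServiceMeta(BaseModel):
    name: str
    permit_list: list[str] = []


meta = ServiceMeta(name="s4l-core", permit_list=["host1"])

# v1: meta.json(include={"permit_list"})
partial = meta.model_dump_json(include={"permit_list"})
assert partial == '{"permit_list":["host1"]}'
```
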
The question is whether these values can be resolved at this point # NOTE: vendor values are in the database and therefore are available at this point - labels = SimcoreServiceLabels.parse_obj(service_labels) + labels = SimcoreServiceLabels.model_validate(service_labels) print("After", labels.model_dump_json(indent=1)) formatted_json = service_meta.model_dump_json(indent=1) @@ -613,4 +610,4 @@ def test_user_preferences_path_is_part_of_exiting_volume(): ), } with pytest.raises(ValidationError, match="user_preferences_path=/tmp/outputs"): - assert DynamicSidecarServiceLabels.parse_raw(json.dumps(labels_data)) + assert DynamicSidecarServiceLabels.model_validate_json(json.dumps(labels_data)) diff --git a/packages/models-library/tests/test_service_settings_nat_rule.py b/packages/models-library/tests/test_service_settings_nat_rule.py index 66319e9435c..c6f9f05497c 100644 --- a/packages/models-library/tests/test_service_settings_nat_rule.py +++ b/packages/models-library/tests/test_service_settings_nat_rule.py @@ -9,7 +9,7 @@ replace_osparc_variable_identifier, ) from models_library.service_settings_nat_rule import NATRule -from pydantic import parse_obj_as +from pydantic import TypeAdapter SUPPORTED_TEMPLATES: set[str] = { "$OSPARC_VARIABLE_%s", @@ -79,7 +79,7 @@ def _all_combinations_from_dict(data: dict[Any, Any]) -> list[dict[Any, Any]]: def test_nat_rule_with_osparc_variable_identifier( nat_rule_dict: dict[str, Any], osparc_variables: dict[str, Any] ): - nat_rule = parse_obj_as(NATRule, nat_rule_dict) + nat_rule = TypeAdapter(NATRule).validate_python(nat_rule_dict) with pytest.raises(UnresolvedOsparcVariableIdentifierError): list(nat_rule.iter_tcp_ports()) @@ -87,7 +87,7 @@ def test_nat_rule_with_osparc_variable_identifier( # NOTE: values are mostly replaced in place unless it's used as first level replace_osparc_variable_identifier(nat_rule, osparc_variables) - nat_rule_str = nat_rule.json() + nat_rule_str = nat_rule.model_dump_json() for osparc_variable_name in osparc_variables: assert osparc_variable_name not in nat_rule_str @@ -108,7 +108,9 @@ def test_nat_rule_with_osparc_variable_identifier( ], ) def test_______(replace_with_value: Any): - a_var = parse_obj_as(OsparcVariableIdentifier, "$OSPARC_VARIABLE_some_var") + a_var = TypeAdapter(OsparcVariableIdentifier).validate_python( + "$OSPARC_VARIABLE_some_var" + ) assert isinstance(a_var, OsparcVariableIdentifier) replaced_var = replace_osparc_variable_identifier( @@ -151,7 +153,7 @@ def test_replace_an_instance_of_osparc_variable_identifier( except TypeError: formatted_template = var_template - a_var = parse_obj_as(OsparcVariableIdentifier, formatted_template) + a_var = TypeAdapter(OsparcVariableIdentifier).validate_python(formatted_template) assert isinstance(a_var, OsparcVariableIdentifier) replace_with_identifier_default = identifier_has_default and replace_with_default diff --git a/packages/models-library/tests/test_services_io.py b/packages/models-library/tests/test_services_io.py index acfb02a05b1..e056647665f 100644 --- a/packages/models-library/tests/test_services_io.py +++ b/packages/models-library/tests/test_services_io.py @@ -15,7 +15,7 @@ def test_service_port_units(tests_data_dir: Path): data = yaml.safe_load((tests_data_dir / "metadata-sleeper-2.0.2.yaml").read_text()) print(ServiceMetaDataPublished.schema_json(indent=2)) - service_meta = ServiceMetaDataPublished.parse_obj(data) + service_meta = ServiceMetaDataPublished.model_validate(data) assert service_meta.inputs for input_nameid, input_meta in 
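
The module-level `_OSPARC_VARIABLE_IDENTIFIER_ADAPTER` seen earlier in this diff follows v2 guidance: constructing a `TypeAdapter` compiles a validator, which is not free, so hoisting it out of hot paths amortizes that cost. Illustrative sketch:

```python
from pydantic import TypeAdapter

# Built once at import time and reused across calls.
_PORT_ADAPTER = TypeAdapter(list[int])


def parse_ports(raw: object) -> list[int]:
    return _PORT_ADAPTER.validate_python(raw)


assert parse_ports(["80", 443]) == [80, 443]  # lax mode coerces "80" -> 80
```
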
service_meta.inputs.items(): diff --git a/packages/models-library/tests/test_sidecar_volumes.py b/packages/models-library/tests/test_sidecar_volumes.py index e9c54554288..402899726bc 100644 --- a/packages/models-library/tests/test_sidecar_volumes.py +++ b/packages/models-library/tests/test_sidecar_volumes.py @@ -14,4 +14,4 @@ def test_volume_state_equality_does_not_use_last_changed(status: VolumeStatus): # at the moment of the creation of the object. assert VolumeState(status=status) == VolumeState(status=status) schema_property_count = len(VolumeState.schema()["properties"]) - assert len(VolumeState(status=status).dict()) == schema_property_count + assert len(VolumeState(status=status).model_dump()) == schema_property_count diff --git a/packages/models-library/tests/test_user_preferences.py b/packages/models-library/tests/test_user_preferences.py index 272e73cf6e5..edac734f0c7 100644 --- a/packages/models-library/tests/test_user_preferences.py +++ b/packages/models-library/tests/test_user_preferences.py @@ -15,20 +15,24 @@ _AutoRegisterMeta, _BaseUserPreferenceModel, ) -from pydantic import parse_obj_as +from pydantic import TypeAdapter _SERVICE_KEY_AND_VERSION_SAMPLES: list[tuple[ServiceKey, ServiceVersion]] = [ ( - parse_obj_as(ServiceKey, "simcore/services/comp/something-1231"), - parse_obj_as(ServiceVersion, "0.0.1"), + TypeAdapter(ServiceKey).validate_python("simcore/services/comp/something-1231"), + TypeAdapter(ServiceVersion).validate_python("0.0.1"), ), ( - parse_obj_as(ServiceKey, "simcore/services/dynamic/something-1231"), - parse_obj_as(ServiceVersion, "0.0.1"), + TypeAdapter(ServiceKey).validate_python( + "simcore/services/dynamic/something-1231" + ), + TypeAdapter(ServiceVersion).validate_python("0.0.1"), ), ( - parse_obj_as(ServiceKey, "simcore/services/frontend/something-1231"), - parse_obj_as(ServiceVersion, "0.0.1"), + TypeAdapter(ServiceKey).validate_python( + "simcore/services/frontend/something-1231" + ), + TypeAdapter(ServiceVersion).validate_python("0.0.1"), ), ] @@ -54,7 +58,7 @@ def test_base_user_preference_model(value: Any, preference_type: PreferenceType) base_data = _get_base_user_preferences_data( preference_type=preference_type, value=value ) - assert parse_obj_as(_BaseUserPreferenceModel, base_data) + assert TypeAdapter(_BaseUserPreferenceModel).validate_python(base_data) def test_frontend_preferences(value: Any): @@ -64,7 +68,7 @@ def test_frontend_preferences(value: Any): base_data.update({"preference_identifier": "pref-name"}) # check serialization - frontend_preference = parse_obj_as(FrontendUserPreference, base_data) + frontend_preference = TypeAdapter(FrontendUserPreference).validate_python(base_data) assert set(frontend_preference.to_db().keys()) == {"value"} @@ -80,7 +84,7 @@ def test_user_service_preferences(value: Any, mock_file_path: Path): "file_path": mock_file_path, } ) - instance = parse_obj_as(UserServiceUserPreference, base_data) + instance = TypeAdapter(UserServiceUserPreference).validate_python(base_data) assert set(instance.to_db().keys()) == { "value", "service_key", @@ -96,7 +100,7 @@ def unregister_defined_classes() -> Iterator[None]: def test__frontend__user_preference(value: Any, unregister_defined_classes: None): - pref1 = FrontendUserPreference.parse_obj( + pref1 = FrontendUserPreference.model_validate( {"preference_identifier": "pref_id", "value": value} ) assert isinstance(pref1, FrontendUserPreference) @@ -112,7 +116,7 @@ def test__user_service__user_preference( mock_file_path: Path, unregister_defined_classes: None, ): - pref1 
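
The `test_sidecar_volumes.py` hunk compares a full `model_dump()` against the schema's property count; `.schema()` still works in v2 but is deprecated in favor of `model_json_schema()`. Sketch with an illustrative two-field model:

```python
from pydantic import BaseModel


class VolumeState(BaseModel):
    status: str
    last_changed: float = 0.0


state = VolumeState(status="OK")

# v2 spelling of VolumeState.schema()["properties"]
schema_property_count = len(VolumeState.model_json_schema()["properties"])
assert len(state.model_dump()) == schema_property_count
```
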
= UserServiceUserPreference.parse_obj( + pref1 = UserServiceUserPreference.model_validate( { "value": value, "service_key": service_key, @@ -123,8 +127,8 @@ def test__user_service__user_preference( # NOTE: these will be stored as bytes, # check bytes serialization/deserialization - pref1_as_bytes = pref1.json().encode() - new_instance = UserServiceUserPreference.parse_raw(pref1_as_bytes) + pref1_as_bytes = pref1.model_dump_json().encode() + new_instance = UserServiceUserPreference.model_validate_json(pref1_as_bytes) assert new_instance == pref1 diff --git a/packages/models-library/tests/test_utils_common_validators.py b/packages/models-library/tests/test_utils_common_validators.py index d4c7cb5409f..db9df708b0f 100644 --- a/packages/models-library/tests/test_utils_common_validators.py +++ b/packages/models-library/tests/test_utils_common_validators.py @@ -48,8 +48,8 @@ class Model(BaseModel): empty_str_to_none_pre_validator ) - model = Model.parse_obj({"nullable_message": None}) - assert model == Model.parse_obj({"nullable_message": ""}) + model = Model.model_validate({"nullable_message": None}) + assert model == Model.model_validate({"nullable_message": ""}) def test_none_to_empty_str_pre_validator(): @@ -60,8 +60,8 @@ class Model(BaseModel): none_to_empty_str_pre_validator ) - model = Model.parse_obj({"message": ""}) - assert model == Model.parse_obj({"message": None}) + model = Model.model_validate({"message": ""}) + assert model == Model.model_validate({"message": None}) def test_null_or_none_str_to_none_validator(): @@ -72,20 +72,20 @@ class Model(BaseModel): null_or_none_str_to_none_validator ) - model = Model.parse_obj({"message": "none"}) - assert model == Model.parse_obj({"message": None}) + model = Model.model_validate({"message": "none"}) + assert model == Model.model_validate({"message": None}) - model = Model.parse_obj({"message": "null"}) - assert model == Model.parse_obj({"message": None}) + model = Model.model_validate({"message": "null"}) + assert model == Model.model_validate({"message": None}) - model = Model.parse_obj({"message": "NoNe"}) - assert model == Model.parse_obj({"message": None}) + model = Model.model_validate({"message": "NoNe"}) + assert model == Model.model_validate({"message": None}) - model = Model.parse_obj({"message": "NuLl"}) - assert model == Model.parse_obj({"message": None}) + model = Model.model_validate({"message": "NuLl"}) + assert model == Model.model_validate({"message": None}) - model = Model.parse_obj({"message": None}) - assert model == Model.parse_obj({"message": None}) + model = Model.model_validate({"message": None}) + assert model == Model.model_validate({"message": None}) - model = Model.parse_obj({"message": ""}) - assert model == Model.parse_obj({"message": ""}) + model = Model.model_validate({"message": ""}) + assert model == Model.model_validate({"message": ""}) diff --git a/packages/models-library/tests/test_utils_pydantic_tools_extension.py b/packages/models-library/tests/test_utils_pydantic_tools_extension.py index 34cbb528dbb..0bf8abdbb41 100644 --- a/packages/models-library/tests/test_utils_pydantic_tools_extension.py +++ b/packages/models-library/tests/test_utils_pydantic_tools_extension.py @@ -39,8 +39,8 @@ def test_schema(): def test_only_required(): model = MyModel(a=1, b=2) - assert model.dict() == {"a": 1, "b": 2, "c": 42, "d": None, "e": None} - assert model.dict(exclude_unset=True) == {"a": 1, "b": 2} + assert model.model_dump() == {"a": 1, "b": 2, "c": 42, "d": None, "e": None} + assert 
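
The preference store round-trips models through bytes; `model_dump_json().encode()` plus `model_validate_json` (which accepts `bytes` directly) replaces `.json().encode()` plus `parse_raw`. Sketch:

```python
from pydantic import BaseModel


class Preference(BaseModel):
    value: int
    service_key: str


pref = Preference(value=42, service_key="simcore/services/dynamic/x")

blob: bytes = pref.model_dump_json().encode()
assert Preference.model_validate_json(blob) == pref  # field-wise equality in v2
```
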
model.model_dump(exclude_unset=True) == {"a": 1, "b": 2} def test_parse_obj_or_none(): diff --git a/packages/models-library/tests/test_utils_service_io.py b/packages/models-library/tests/test_utils_service_io.py index 51e4324e2be..7ef8d4070a0 100644 --- a/packages/models-library/tests/test_utils_service_io.py +++ b/packages/models-library/tests/test_utils_service_io.py @@ -17,7 +17,7 @@ from models_library.services import ServiceInput, ServiceOutput, ServicePortKey from models_library.utils.json_schema import jsonschema_validate_schema from models_library.utils.services_io import get_service_io_json_schema -from pydantic import parse_obj_as +from pydantic import TypeAdapter example_inputs_labels = [ e for e in ServiceInput.model_config["json_schema_extra"]["examples"] if e["label"] @@ -32,11 +32,11 @@ def service_port(request: pytest.FixtureRequest) -> ServiceInput | ServiceOutput try: index = example_inputs_labels.index(request.param) example = ServiceInput.model_config["json_schema_extra"]["examples"][index] - return ServiceInput.parse_obj(example) + return ServiceInput.model_validate(example) except ValueError: index = example_outputs_labels.index(request.param) example = ServiceOutput.model_config["json_schema_extra"]["examples"][index] - return ServiceOutput.parse_obj(example) + return ServiceOutput.model_validate(example) def test_get_schema_from_port(service_port: ServiceInput | ServiceOutput): @@ -73,8 +73,12 @@ def test_against_service_metadata_configs(metadata_path: Path): meta = json.loads(metadata_path.read_text()) - inputs = parse_obj_as(dict[ServicePortKey, ServiceInput], meta["inputs"]) - outputs = parse_obj_as(dict[ServicePortKey, ServiceOutput], meta["outputs"]) + inputs = TypeAdapter(dict[ServicePortKey, ServiceInput]).validate_python( + meta["inputs"] + ) + outputs = TypeAdapter(dict[ServicePortKey, ServiceOutput]).validate_python( + meta["outputs"] + ) for port in itertools.chain(inputs.values(), outputs.values()): schema = get_service_io_json_schema(port) diff --git a/packages/models-library/tests/test_utils_specs_substitution.py b/packages/models-library/tests/test_utils_specs_substitution.py index 0670e56e271..c523271bd2a 100644 --- a/packages/models-library/tests/test_utils_specs_substitution.py +++ b/packages/models-library/tests/test_utils_specs_substitution.py @@ -12,7 +12,7 @@ SpecsSubstitutionsResolver, SubstitutionValue, ) -from pydantic import parse_obj_as +from pydantic import TypeAdapter @pytest.fixture() @@ -49,7 +49,7 @@ def available_osparc_variables( "SERVICE_VERSION": service_version, "DISPLAY": "True", } - return parse_obj_as(dict[str, SubstitutionValue], environs) + return TypeAdapter(dict[str, SubstitutionValue]).validate_python(environs) @pytest.mark.parametrize( diff --git a/packages/notifications-library/requirements/_base.txt b/packages/notifications-library/requirements/_base.txt index b7e4320e9e0..a4df8b512d1 100644 --- a/packages/notifications-library/requirements/_base.txt +++ b/packages/notifications-library/requirements/_base.txt @@ -4,6 +4,8 @@ aiosmtplib==3.0.2 # via -r requirements/_base.in alembic==1.13.2 # via -r requirements/../../../packages/postgres-database/requirements/_base.in +annotated-types==0.7.0 + # via pydantic arrow==1.3.0 # via -r requirements/../../../packages/models-library/requirements/_base.in async-timeout==4.0.3 @@ -63,7 +65,7 @@ orjson==3.10.7 # -r requirements/../../../packages/models-library/requirements/_base.in psycopg2-binary==2.9.9 # via sqlalchemy -pydantic==1.10.17 +pydantic==2.9.1 # via # -c 
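
`parse_obj_as` over container types maps onto a `TypeAdapter` of the same generic alias, as the `test_utils_service_io.py` hunk shows. Sketch with illustrative key and value types:

```python
from pydantic import BaseModel, TypeAdapter


class PortMeta(BaseModel):
    label: str


meta = {"inputs": {"input_1": {"label": "x"}}}

# v1: parse_obj_as(dict[str, PortMeta], meta["inputs"])
inputs = TypeAdapter(dict[str, PortMeta]).validate_python(meta["inputs"])
assert inputs["input_1"].label == "x"
```
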
requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt @@ -72,10 +74,22 @@ pydantic==1.10.17 # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/postgres-database/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in + # pydantic-extra-types + # pydantic-settings +pydantic-core==2.23.3 + # via pydantic +pydantic-extra-types==2.9.0 + # via -r requirements/../../../packages/models-library/requirements/_base.in +pydantic-settings==2.5.2 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in pygments==2.18.0 # via rich python-dateutil==2.9.0.post0 # via arrow +python-dotenv==1.0.1 + # via pydantic-settings referencing==0.35.1 # via # jsonschema @@ -108,6 +122,7 @@ typing-extensions==4.12.2 # via # alembic # pydantic + # pydantic-core # typer yarl==1.9.4 # via -r requirements/../../../packages/postgres-database/requirements/_base.in diff --git a/packages/notifications-library/requirements/_test.txt b/packages/notifications-library/requirements/_test.txt index 48dd95db3fc..ab645dfb576 100644 --- a/packages/notifications-library/requirements/_test.txt +++ b/packages/notifications-library/requirements/_test.txt @@ -10,8 +10,6 @@ coverage==7.6.1 # pytest-cov docker==7.1.0 # via -r requirements/_test.in -exceptiongroup==1.2.2 - # via pytest faker==27.0.0 # via -r requirements/_test.in greenlet==3.0.3 @@ -68,7 +66,9 @@ python-dateutil==2.9.0.post0 # -c requirements/_base.txt # faker python-dotenv==1.0.1 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in pyyaml==6.0.2 # via # -c requirements/../../../requirements/constraints.txt @@ -90,11 +90,6 @@ tenacity==9.0.0 # via -r requirements/_test.in termcolor==2.4.0 # via pytest-sugar -tomli==2.0.1 - # via - # coverage - # mypy - # pytest types-aiofiles==24.1.0.20240626 # via -r requirements/_test.in typing-extensions==4.12.2 diff --git a/packages/notifications-library/requirements/_tools.txt b/packages/notifications-library/requirements/_tools.txt index 71884cfdaf4..8204f34a33c 100644 --- a/packages/notifications-library/requirements/_tools.txt +++ b/packages/notifications-library/requirements/_tools.txt @@ -71,22 +71,12 @@ ruff==0.6.1 # via -r requirements/../../../requirements/devenv.txt setuptools==73.0.1 # via pip-tools -tomli==2.0.1 - # via - # -c requirements/_test.txt - # black - # build - # mypy - # pip-tools - # pylint tomlkit==0.13.2 # via pylint typing-extensions==4.12.2 # via # -c requirements/_base.txt # -c requirements/_test.txt - # astroid - # black # mypy virtualenv==20.26.3 # via pre-commit diff --git a/packages/notifications-library/tests/email/test_email_events.py b/packages/notifications-library/tests/email/test_email_events.py index 5e3786ab234..995da5faf4e 100644 --- a/packages/notifications-library/tests/email/test_email_events.py +++ b/packages/notifications-library/tests/email/test_email_events.py @@ -66,8 +66,8 @@ def ipinfo(faker: Faker) -> dict[str, Any]: @pytest.fixture def request_form(faker: Faker) -> dict[str, Any]: return AccountRequestInfo( - **AccountRequestInfo.Config.schema_extra["example"] - ).dict() + **AccountRequestInfo.model_config["json_schema_extra"]["example"] + ).model_dump() @pytest.fixture diff --git 
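
The `test_email_events.py` hunk below reads the example payload from `model_config` instead of the old inner `Config` class. A sketch of where v2 keeps that metadata (class name and example are illustrative, not the repo's `AccountRequestInfo`):

```python
from pydantic import BaseModel, ConfigDict


class AccountInfo(BaseModel):
    model_config = ConfigDict(json_schema_extra={"example": {"name": "Ada"}})

    name: str


# v1: AccountInfo.Config.schema_extra["example"]
example = AccountInfo.model_config["json_schema_extra"]["example"]
assert AccountInfo(**example).model_dump() == example
```
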
a/packages/notifications-library/tests/with_db/conftest.py b/packages/notifications-library/tests/with_db/conftest.py index bdd3d0f3d09..750f3cc24a4 100644 --- a/packages/notifications-library/tests/with_db/conftest.py +++ b/packages/notifications-library/tests/with_db/conftest.py @@ -14,7 +14,7 @@ from models_library.products import ProductName from models_library.users import GroupID, UserID from notifications_library._templates import get_default_named_templates -from pydantic import validate_arguments +from pydantic import validate_call from simcore_postgres_database.models.jinja2_templates import jinja2_templates from simcore_postgres_database.models.payments_transactions import payments_transactions from simcore_postgres_database.models.products import products @@ -165,7 +165,7 @@ def set_template_to_product( sqlalchemy_async_engine: AsyncEngine, product: dict[str, Any] ): # NOTE: needs all fixture products in db - @validate_arguments + @validate_call async def _(template_name: IDStr, product_name: ProductName) -> None: async with sqlalchemy_async_engine.begin() as conn: await conn.execute( @@ -179,7 +179,7 @@ async def _(template_name: IDStr, product_name: ProductName) -> None: @pytest.fixture def unset_template_to_product(sqlalchemy_async_engine: AsyncEngine): - @validate_arguments + @validate_call async def _(template_name: IDStr, product_name: ProductName) -> None: async with sqlalchemy_async_engine.begin() as conn: await conn.execute( diff --git a/packages/postgres-database/requirements/_base.in b/packages/postgres-database/requirements/_base.in index 48679f44663..0294edf9114 100644 --- a/packages/postgres-database/requirements/_base.in +++ b/packages/postgres-database/requirements/_base.in @@ -3,6 +3,7 @@ # --constraint ../../../requirements/constraints.txt --constraint ./constraints.txt +--requirement ../../../packages/common-library/requirements/_base.in alembic pydantic diff --git a/packages/postgres-database/requirements/_base.txt b/packages/postgres-database/requirements/_base.txt index aaf19732f53..5cb99144fd9 100644 --- a/packages/postgres-database/requirements/_base.txt +++ b/packages/postgres-database/requirements/_base.txt @@ -1,5 +1,7 @@ alembic==1.13.2 # via -r requirements/_base.in +annotated-types==0.7.0 + # via pydantic async-timeout==4.0.3 # via asyncpg asyncpg==0.29.0 @@ -18,10 +20,12 @@ multidict==6.0.5 # via yarl psycopg2-binary==2.9.9 # via sqlalchemy -pydantic==1.10.17 +pydantic==2.9.1 # via # -c requirements/../../../requirements/constraints.txt # -r requirements/_base.in +pydantic-core==2.23.3 + # via pydantic sqlalchemy==1.4.53 # via # -c requirements/../../../requirements/constraints.txt @@ -31,5 +35,6 @@ typing-extensions==4.12.2 # via # alembic # pydantic + # pydantic-core yarl==1.9.4 # via -r requirements/_base.in diff --git a/packages/postgres-database/requirements/_test.txt b/packages/postgres-database/requirements/_test.txt index 245a367c69b..8bd80b78b95 100644 --- a/packages/postgres-database/requirements/_test.txt +++ b/packages/postgres-database/requirements/_test.txt @@ -10,8 +10,6 @@ coverage==7.6.1 # via # -r requirements/_test.in # pytest-cov -exceptiongroup==1.2.2 - # via pytest faker==27.0.0 # via -r requirements/_test.in greenlet==3.0.3 @@ -70,11 +68,6 @@ sqlalchemy==1.4.53 # aiopg sqlalchemy2-stubs==0.0.2a38 # via sqlalchemy -tomli==2.0.1 - # via - # coverage - # mypy - # pytest types-docker==7.1.0.20240821 # via -r requirements/_test.in types-psycopg2==2.9.21.20240819 diff --git a/packages/postgres-database/requirements/_tools.txt 
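
`validate_call` is the v2 name for `validate_arguments`, used in the `conftest.py` hunk above to type-check fixture helper arguments. A sketch with illustrative names:

```python
from pydantic import PositiveInt, ValidationError, validate_call


@validate_call  # v1: @validate_arguments
def set_template(template_name: str, product_id: PositiveInt) -> str:
    return f"{template_name}@{product_id}"


assert set_template("welcome.jinja2", "3") == "welcome.jinja2@3"  # "3" coerced to 3
try:
    set_template("welcome.jinja2", -1)
except ValidationError as err:
    print(err.error_count(), "bad argument(s)")
```
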
b/packages/postgres-database/requirements/_tools.txt index 10d7162ab81..9247bc4b1a9 100644 --- a/packages/postgres-database/requirements/_tools.txt +++ b/packages/postgres-database/requirements/_tools.txt @@ -70,22 +70,12 @@ ruff==0.6.1 # via -r requirements/../../../requirements/devenv.txt setuptools==73.0.1 # via pip-tools -tomli==2.0.1 - # via - # -c requirements/_test.txt - # black - # build - # mypy - # pip-tools - # pylint tomlkit==0.13.2 # via pylint typing-extensions==4.12.2 # via # -c requirements/_base.txt # -c requirements/_test.txt - # astroid - # black # mypy virtualenv==20.26.3 # via pre-commit diff --git a/packages/postgres-database/requirements/ci.txt b/packages/postgres-database/requirements/ci.txt index b901feff8e4..8df1aecf670 100644 --- a/packages/postgres-database/requirements/ci.txt +++ b/packages/postgres-database/requirements/ci.txt @@ -12,7 +12,8 @@ --requirement _test.txt # installs this repo's packages -pytest-simcore @ ../../packages/pytest-simcore/ +simcore-common-library @ ../common-library/ +pytest-simcore @ ../pytest-simcore/ # current module simcore-postgres-database @ . diff --git a/packages/postgres-database/requirements/dev.txt b/packages/postgres-database/requirements/dev.txt index 8136f1a48b5..095f8383b2a 100644 --- a/packages/postgres-database/requirements/dev.txt +++ b/packages/postgres-database/requirements/dev.txt @@ -13,7 +13,9 @@ --requirement _tools.txt # installs this repo's packages ---editable ../../packages/pytest-simcore/ +--editable ../common-library/ +--editable ../pytest-simcore/ + # current module --editable . diff --git a/packages/postgres-database/src/simcore_postgres_database/utils_folders.py b/packages/postgres-database/src/simcore_postgres_database/utils_folders.py index e0f59cdcfd2..6dcca321a73 100644 --- a/packages/postgres-database/src/simcore_postgres_database/utils_folders.py +++ b/packages/postgres-database/src/simcore_postgres_database/utils_folders.py @@ -5,21 +5,22 @@ from datetime import datetime from enum import Enum from functools import reduce -from typing import Any, ClassVar, Final, TypeAlias, cast +from typing import Annotated, Any, ClassVar, Final, TypeAlias, cast import sqlalchemy as sa from aiopg.sa.connection import SAConnection from aiopg.sa.result import RowProxy +from common_library.errors_classes import OsparcErrorMixin from pydantic import ( BaseModel, - ConstrainedStr, + ConfigDict, Field, NonNegativeInt, PositiveInt, + StringConstraints, + TypeAdapter, ValidationError, - parse_obj_as, ) -from pydantic.errors import PydanticErrorMixin from simcore_postgres_database.utils_ordering import OrderByDict from sqlalchemy import Column, func from sqlalchemy.dialects import postgresql @@ -63,8 +64,8 @@ """ -class FoldersError(PydanticErrorMixin, RuntimeError): - pass +class FoldersError(OsparcErrorMixin, RuntimeError): + ... 
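
The error-class hunks swap `PydanticErrorMixin` for the repo's own `OsparcErrorMixin` from `packages/common-library`, since v2 removed the former. A toy stand-in (not the real mixin) showing the `msg_template` contract these error classes rely on:

```python
# Toy stand-in for OsparcErrorMixin: format msg_template with the kwargs received.
class ErrorMixin:
    msg_template: str = "unexpected error"

    def __init__(self, **ctx) -> None:
        super().__init__(self.msg_template.format(**ctx))


class InvalidFolderNameError(ErrorMixin, RuntimeError):
    msg_template = "Provided folder name='{name}' is invalid: {reason}"


err = InvalidFolderNameError(name="COM1", reason="reserved device name")
assert "COM1" in str(err)
```
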
class InvalidFolderNameError(FoldersError): @@ -294,13 +295,17 @@ def _get_filter_for_enabled_permissions( ### -class FolderName(ConstrainedStr): - regex = re.compile( - r'^(?!.*[<>:"/\\|?*\]])(?!.*\b(?:LPT9|COM1|LPT1|COM2|LPT3|LPT4|CON|COM5|COM3|COM4|AUX|PRN|LPT2|LPT5|COM6|LPT7|NUL|COM8|LPT6|COM9|COM7|LPT8)\b).+$', - re.IGNORECASE, - ) - min_length = 1 - max_length = 255 +FolderName: TypeAlias = Annotated[ + str, + StringConstraints( + min_length=1, + max_length=255, + pattern=re.compile( + r'^(?!.*[<>:"/\\|?*\]])(?!.*\b(?:LPT9|COM1|LPT1|COM2|LPT3|LPT4|CON|COM5|COM3|COM4|AUX|PRN|LPT2|LPT5|COM6|LPT7|NUL|COM8|LPT6|COM9|COM7|LPT8)\b).+$', + re.IGNORECASE, + ), + ), +] class FolderEntry(BaseModel): @@ -313,9 +318,7 @@ class FolderEntry(BaseModel): modified: datetime = Field(alias="access_modified") my_access_rights: _FolderPermissions access_rights: dict[_GroupID, _FolderPermissions] - - class Config: - orm_mode = True + model_config = ConfigDict(from_attributes=True) class _ResolvedAccessRights(BaseModel): @@ -327,9 +330,7 @@ class _ResolvedAccessRights(BaseModel): write: bool delete: bool level: int - - class Config: - orm_mode = True + model_config = ConfigDict(from_attributes=True) async def _get_resolved_access_rights( @@ -529,7 +530,7 @@ async def folder_create( RootFolderRequiresAtLeastOnePrimaryGroupError """ try: - parse_obj_as(FolderName, name) + TypeAdapter(FolderName).validate_python(name) except ValidationError as exc: raise InvalidFolderNameError(name=name, reason=f"{exc}") from exc diff --git a/packages/postgres-database/src/simcore_postgres_database/utils_projects_metadata.py b/packages/postgres-database/src/simcore_postgres_database/utils_projects_metadata.py index 39749b7fdbf..c8aa9962d43 100644 --- a/packages/postgres-database/src/simcore_postgres_database/utils_projects_metadata.py +++ b/packages/postgres-database/src/simcore_postgres_database/utils_projects_metadata.py @@ -5,8 +5,8 @@ import sqlalchemy as sa from aiopg.sa.connection import SAConnection from aiopg.sa.result import ResultProxy, RowProxy -from pydantic import BaseModel -from pydantic.errors import PydanticErrorMixin +from common_library.errors_classes import OsparcErrorMixin +from pydantic import BaseModel, ConfigDict from sqlalchemy.dialects.postgresql import insert as pg_insert from .errors import ForeignKeyViolation @@ -18,7 +18,7 @@ # -class BaseProjectsMetadataError(PydanticErrorMixin, RuntimeError): +class BaseProjectsMetadataError(OsparcErrorMixin, RuntimeError): msg_template: str = "Project metadata unexpected error" @@ -53,10 +53,7 @@ class ProjectMetadata(BaseModel): parent_node_id: uuid.UUID | None root_parent_project_uuid: uuid.UUID | None root_parent_node_id: uuid.UUID | None - - class Config: - frozen = True - orm_mode = True + model_config = ConfigDict(frozen=True, from_attributes=True) # diff --git a/packages/postgres-database/src/simcore_postgres_database/utils_projects_nodes.py b/packages/postgres-database/src/simcore_postgres_database/utils_projects_nodes.py index 09cb8a561f4..cb47141b1ab 100644 --- a/packages/postgres-database/src/simcore_postgres_database/utils_projects_nodes.py +++ b/packages/postgres-database/src/simcore_postgres_database/utils_projects_nodes.py @@ -5,8 +5,8 @@ import sqlalchemy from aiopg.sa.connection import SAConnection -from pydantic import BaseModel, Field -from pydantic.errors import PydanticErrorMixin +from common_library.errors_classes import OsparcErrorMixin +from pydantic import BaseModel, ConfigDict, Field from sqlalchemy.dialects.postgresql import insert as 
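
The `FolderName` rewrite below is the standard v2 recipe for retiring `ConstrainedStr`: an `Annotated` alias carrying `StringConstraints`. The same shape in miniature (simpler pattern, illustrative bounds):

```python
import re
from typing import Annotated

from pydantic import StringConstraints, TypeAdapter, ValidationError

ShortName = Annotated[
    str, StringConstraints(min_length=1, max_length=8, pattern=re.compile(r"^[\w-]+$"))
]

adapter = TypeAdapter(ShortName)
assert adapter.validate_python("my-dir") == "my-dir"
try:
    adapter.validate_python("way-too-long-name")
except ValidationError:
    print("rejected: exceeds max_length")
```

The neighbouring `orm_mode = True` → `ConfigDict(from_attributes=True)` changes in the same files are pure renames with unchanged semantics.
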
pg_insert from .errors import ForeignKeyViolation, UniqueViolation @@ -17,7 +17,7 @@ # # Errors # -class BaseProjectNodesError(PydanticErrorMixin, RuntimeError): +class BaseProjectNodesError(OsparcErrorMixin, RuntimeError): msg_template: str = "Project nodes unexpected error" @@ -43,18 +43,16 @@ class ProjectNodeCreate(BaseModel): @classmethod def get_field_names(cls, *, exclude: set[str]) -> set[str]: - return {name for name in cls.__fields__ if name not in exclude} + return {name for name in cls.model_fields.keys() if name not in exclude} - class Config: - frozen = True + model_config = ConfigDict(frozen=True) class ProjectNode(ProjectNodeCreate): created: datetime.datetime modified: datetime.datetime - class Config(ProjectNodeCreate.Config): - orm_mode = True + model_config = ConfigDict(from_attributes=True) @dataclass(frozen=True, kw_only=True) @@ -85,7 +83,7 @@ async def add( [ { "project_uuid": f"{self.project_uuid}", - **node.dict(), + **node.model_dump(), } for node in nodes ] diff --git a/packages/postgres-database/tests/test_utils_projects_nodes.py b/packages/postgres-database/tests/test_utils_projects_nodes.py index a20083608dd..50d2af96911 100644 --- a/packages/postgres-database/tests/test_utils_projects_nodes.py +++ b/packages/postgres-database/tests/test_utils_projects_nodes.py @@ -412,9 +412,9 @@ async def test_get_project_id_from_node_id_raises_if_multiple_projects_with_same assert len(project1_nodes) == 1 project2_nodes = await project2_repo.add(connection, nodes=[shared_node]) assert len(project2_nodes) == 1 - assert project1_nodes[0].dict( + assert project1_nodes[0].model_dump( include=ProjectNodeCreate.get_field_names(exclude={"created", "modified"}) - ) == project2_nodes[0].dict( + ) == project2_nodes[0].model_dump( include=ProjectNodeCreate.get_field_names(exclude={"created", "modified"}) ) with pytest.raises(ProjectNodesNonUniqueNodeFoundError): diff --git a/packages/pytest-simcore/src/pytest_simcore/aws_s3_service.py b/packages/pytest-simcore/src/pytest_simcore/aws_s3_service.py index 48fb2d1283e..e6afeac8e7b 100644 --- a/packages/pytest-simcore/src/pytest_simcore/aws_s3_service.py +++ b/packages/pytest-simcore/src/pytest_simcore/aws_s3_service.py @@ -32,7 +32,7 @@ async def s3_client(s3_settings: S3Settings) -> typing.AsyncIterator[S3Client]: config=Config(signature_version="s3v4"), ) assert isinstance(session_client, ClientCreatorContext) - client = typing.cast(S3Client, await exit_stack.enter_async_context(session_client)) + client = typing.cast(S3Client, await exit_stack.enter_async_context(session_client)) # type: ignore[arg-type] yield client diff --git a/packages/pytest-simcore/src/pytest_simcore/aws_server.py b/packages/pytest-simcore/src/pytest_simcore/aws_server.py index 077fb25d51a..74f007973c5 100644 --- a/packages/pytest-simcore/src/pytest_simcore/aws_server.py +++ b/packages/pytest-simcore/src/pytest_simcore/aws_server.py @@ -11,7 +11,7 @@ from faker import Faker from models_library.utils.fastapi_encoders import jsonable_encoder from moto.server import ThreadedMotoServer -from pydantic import AnyHttpUrl, SecretStr, parse_obj_as +from pydantic import SecretStr from pytest_mock.plugin import MockerFixture from settings_library.basic_types import IDStr from settings_library.ec2 import EC2Settings @@ -75,7 +75,7 @@ def mocked_ec2_server_envs( mocked_ec2_server_settings: EC2Settings, monkeypatch: pytest.MonkeyPatch, ) -> EnvVarsDict: - changed_envs: EnvVarsDict = mocked_ec2_server_settings.dict() + changed_envs: EnvVarsDict = 
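
`cls.__fields__` still exists in v2 but is deprecated; the canonical spelling is `cls.model_fields`, as the `ProjectNodeCreate.get_field_names` hunk adopts. Sketch with a trimmed-down field set (the real model has many more):

```python
from pydantic import BaseModel, ConfigDict


class ProjectNodeCreate(BaseModel):
    model_config = ConfigDict(frozen=True)

    node_id: str
    required_resources: dict = {}


# v1 iterated cls.__fields__; v2 exposes the same name->FieldInfo mapping.
names = {n for n in ProjectNodeCreate.model_fields if n != "node_id"}
assert names == {"required_resources"}
```
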
mocked_ec2_server_settings.model_dump() return setenvs_from_dict(monkeypatch, {**changed_envs}) @@ -101,10 +101,7 @@ def mocked_ssm_server_settings( ) -> SSMSettings: return SSMSettings( SSM_ACCESS_KEY_ID=SecretStr("xxx"), - SSM_ENDPOINT=parse_obj_as( - AnyHttpUrl, - f"http://{mocked_aws_server._ip_address}:{mocked_aws_server._port}", # pylint: disable=protected-access # noqa: SLF001 - ), + SSM_ENDPOINT=f"http://{mocked_aws_server._ip_address}:{mocked_aws_server._port}", # type: ignore[arg-type] # pylint: disable=protected-access # noqa: SLF001 SSM_SECRET_ACCESS_KEY=SecretStr("xxx"), ) @@ -124,10 +121,7 @@ def mocked_s3_server_settings( ) -> S3Settings: return S3Settings( S3_ACCESS_KEY=IDStr("xxx"), - S3_ENDPOINT=parse_obj_as( - AnyHttpUrl, - f"http://{mocked_aws_server._ip_address}:{mocked_aws_server._port}", # pylint: disable=protected-access # noqa: SLF001 - ), + S3_ENDPOINT=f"http://{mocked_aws_server._ip_address}:{mocked_aws_server._port}", # type: ignore[arg-type] # pylint: disable=protected-access # noqa: SLF001 S3_SECRET_KEY=IDStr("xxx"), S3_BUCKET_NAME=IDStr(f"pytest{faker.pystr().lower()}"), S3_REGION=IDStr("us-east-1"), @@ -139,5 +133,7 @@ def mocked_s3_server_envs( mocked_s3_server_settings: S3Settings, monkeypatch: pytest.MonkeyPatch, ) -> EnvVarsDict: - changed_envs: EnvVarsDict = mocked_s3_server_settings.dict(exclude_unset=True) + changed_envs: EnvVarsDict = mocked_s3_server_settings.model_dump( + mode="json", exclude_unset=True + ) return setenvs_from_dict(monkeypatch, {**changed_envs}) diff --git a/packages/pytest-simcore/src/pytest_simcore/docker_registry.py b/packages/pytest-simcore/src/pytest_simcore/docker_registry.py index 5780937a2c0..91cd5e2d428 100644 --- a/packages/pytest-simcore/src/pytest_simcore/docker_registry.py +++ b/packages/pytest-simcore/src/pytest_simcore/docker_registry.py @@ -106,9 +106,9 @@ def external_registry_settings( if external_envfile_dict: config = { field: external_envfile_dict.get(field, None) - for field in RegistrySettings.__fields__ + for field in RegistrySettings.model_fields } - return RegistrySettings.parse_obj(config) + return RegistrySettings.model_validate(config) return None diff --git a/packages/pytest-simcore/src/pytest_simcore/faker_payments_data.py b/packages/pytest-simcore/src/pytest_simcore/faker_payments_data.py index 9d675c45e11..3f4058b72e9 100644 --- a/packages/pytest-simcore/src/pytest_simcore/faker_payments_data.py +++ b/packages/pytest-simcore/src/pytest_simcore/faker_payments_data.py @@ -24,7 +24,7 @@ from models_library.products import ProductName from models_library.users import UserID from models_library.wallets import WalletID -from pydantic import EmailStr, HttpUrl, parse_obj_as +from pydantic import EmailStr, HttpUrl, TypeAdapter from simcore_postgres_database.models.payments_transactions import ( PaymentTransactionState, ) @@ -34,27 +34,27 @@ @pytest.fixture def wallet_id(faker: Faker) -> WalletID: - return parse_obj_as(WalletID, faker.pyint()) + return TypeAdapter(WalletID).validate_python(faker.pyint()) @pytest.fixture def wallet_name(faker: Faker) -> IDStr: - return parse_obj_as(IDStr, f"wallet-{faker.word()}") + return TypeAdapter(IDStr).validate_python(f"wallet-{faker.word()}") @pytest.fixture -def invoice_url(faker: Faker) -> HttpUrl: - return parse_obj_as(HttpUrl, faker.image_url()) +def invoice_url(faker: Faker) -> str: + return faker.image_url() @pytest.fixture -def invoice_pdf_url(faker: Faker) -> HttpUrl: - return parse_obj_as(HttpUrl, faker.image_url()) +def invoice_pdf_url(faker: Faker) -> str: + 
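
The `mocked_s3_server_envs` change adds `mode="json"` so URL and secret types serialize to plain strings suitable for environment variables. A sketch assuming only a plain model, not the repo's `S3Settings`:

```python
from pydantic import AnyHttpUrl, BaseModel


class S3Cfg(BaseModel):
    S3_ENDPOINT: AnyHttpUrl
    S3_REGION: str = "us-east-1"


cfg = S3Cfg(S3_ENDPOINT="http://127.0.0.1:9000")

# mode="json" renders AnyHttpUrl (no longer a str subclass in v2) as a plain
# string; exclude_unset drops the defaulted S3_REGION.
envs = cfg.model_dump(mode="json", exclude_unset=True)
assert envs == {"S3_ENDPOINT": "http://127.0.0.1:9000/"}
```
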
return faker.image_url() @pytest.fixture def stripe_invoice_id(faker: Faker) -> StripeInvoiceID: - return parse_obj_as(StripeInvoiceID, f"in_{faker.word()}") + return TypeAdapter(StripeInvoiceID).validate_python(f"in_{faker.word()}") @pytest.fixture diff --git a/packages/pytest-simcore/src/pytest_simcore/faker_products_data.py b/packages/pytest-simcore/src/pytest_simcore/faker_products_data.py index f82636b6633..e55c1e489f0 100644 --- a/packages/pytest-simcore/src/pytest_simcore/faker_products_data.py +++ b/packages/pytest-simcore/src/pytest_simcore/faker_products_data.py @@ -14,7 +14,7 @@ import pytest from faker import Faker from models_library.products import ProductName, StripePriceID, StripeTaxRateID -from pydantic import EmailStr, parse_obj_as +from pydantic import EmailStr, TypeAdapter from .helpers.faker_factories import random_product @@ -51,8 +51,7 @@ def product_name() -> ProductName: def support_email( request: pytest.FixtureRequest, product_name: ProductName ) -> EmailStr: - return parse_obj_as( - EmailStr, + return TypeAdapter(EmailStr).validate_python( request.config.getoption("--faker-support-email", default=None) or f"support@{product_name}.info", ) @@ -60,8 +59,7 @@ def support_email( @pytest.fixture def bcc_email(request: pytest.FixtureRequest, product_name: ProductName) -> EmailStr: - return parse_obj_as( - EmailStr, + return TypeAdapter(EmailStr).validate_python( request.config.getoption("--faker-bcc-email", default=None) or f"finance@{product_name}-department.info", ) diff --git a/packages/pytest-simcore/src/pytest_simcore/faker_projects_data.py b/packages/pytest-simcore/src/pytest_simcore/faker_projects_data.py index 643ffee8859..f2d0eee8105 100644 --- a/packages/pytest-simcore/src/pytest_simcore/faker_projects_data.py +++ b/packages/pytest-simcore/src/pytest_simcore/faker_projects_data.py @@ -13,7 +13,7 @@ from faker import Faker from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID -from pydantic import parse_obj_as +from pydantic import TypeAdapter _MESSAGE = ( "If set, it overrides the fake value of `{}` fixture." 
@@ -34,12 +34,11 @@ def pytest_addoption(parser: pytest.Parser): @pytest.fixture def project_id(faker: Faker, request: pytest.FixtureRequest) -> ProjectID: - return parse_obj_as( - ProjectID, + return TypeAdapter(ProjectID).validate_python( request.config.getoption("--faker-project-id", default=None) or faker.uuid4(), ) @pytest.fixture def node_id(faker: Faker) -> NodeID: - return parse_obj_as(NodeID, faker.uuid4()) + return TypeAdapter(NodeID).validate_python(faker.uuid4()) diff --git a/packages/pytest-simcore/src/pytest_simcore/faker_users_data.py b/packages/pytest-simcore/src/pytest_simcore/faker_users_data.py index 6ba011db47c..4e59b6db93a 100644 --- a/packages/pytest-simcore/src/pytest_simcore/faker_users_data.py +++ b/packages/pytest-simcore/src/pytest_simcore/faker_users_data.py @@ -14,7 +14,7 @@ from faker import Faker from models_library.basic_types import IDStr from models_library.users import UserID -from pydantic import EmailStr, parse_obj_as +from pydantic import EmailStr, TypeAdapter from .helpers.faker_factories import DEFAULT_TEST_PASSWORD, random_user @@ -61,8 +61,7 @@ def pytest_addoption(parser: pytest.Parser): @pytest.fixture def user_id(faker: Faker, request: pytest.FixtureRequest) -> UserID: - return parse_obj_as( - UserID, + return TypeAdapter(UserID).validate_python( request.config.getoption("--faker-user-id", default=None) or faker.pyint(), ) @@ -74,8 +73,7 @@ def is_external_user_email(request: pytest.FixtureRequest) -> bool: @pytest.fixture def user_email(faker: Faker, request: pytest.FixtureRequest) -> EmailStr: - return parse_obj_as( - EmailStr, + return TypeAdapter(EmailStr).validate_python( request.config.getoption(_FAKE_USER_EMAIL_OPTION, default=None) or faker.email(), ) @@ -93,7 +91,7 @@ def user_last_name(faker: Faker) -> str: @pytest.fixture def user_name(user_email: str) -> IDStr: - return parse_obj_as(IDStr, user_email.split("@")[0]) + return TypeAdapter(IDStr).validate_python(user_email.split("@")[0]) @pytest.fixture diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/faker_factories.py b/packages/pytest-simcore/src/pytest_simcore/helpers/faker_factories.py index bc415ab3161..f51a5d8211b 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/faker_factories.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/faker_factories.py @@ -209,7 +209,7 @@ def random_product( registration_email_template: str | None = None, fake: Faker = DEFAULT_FAKER, **overrides, -): +) -> dict[str, Any]: """ Foreign keys are: diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/httpx_calls_capture_openapi.py b/packages/pytest-simcore/src/pytest_simcore/helpers/httpx_calls_capture_openapi.py index fd5afaa183a..177b1330e36 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/httpx_calls_capture_openapi.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/httpx_calls_capture_openapi.py @@ -5,7 +5,7 @@ import httpx import jsonref -from pydantic import ValidationError, parse_obj_as +from pydantic import TypeAdapter, ValidationError from settings_library.catalog import CatalogSettings from settings_library.director_v2 import DirectorV2Settings from settings_library.storage import StorageSettings @@ -87,7 +87,7 @@ def _get_params( raise VerbNotInPathError(msg) if (params := verb_spec.get("parameters")) is None: continue - all_params += parse_obj_as(list[CapturedParameter], params) + all_params += TypeAdapter(list[CapturedParameter]).validate_python(params) return set(all_params) diff --git 
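
The faker fixtures all migrate to per-type `TypeAdapter`s; `EmailStr` is representative, and it needs the `email-validator` extra installed at runtime. A sketch with an illustrative helper:

```python
from pydantic import EmailStr, TypeAdapter  # EmailStr requires the email-validator extra

email_adapter = TypeAdapter(EmailStr)


def support_email(cli_override: str | None, product: str) -> str:
    # v1: parse_obj_as(EmailStr, cli_override or f"support@{product}.info")
    return email_adapter.validate_python(cli_override or f"support@{product}.info")


assert support_email(None, "osparc") == "support@osparc.info"
```
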
a/packages/pytest-simcore/src/pytest_simcore/helpers/httpx_calls_capture_parameters.py b/packages/pytest-simcore/src/pytest_simcore/helpers/httpx_calls_capture_parameters.py index 89783d0591c..25f2abc8cd0 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/httpx_calls_capture_parameters.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/httpx_calls_capture_parameters.py @@ -1,17 +1,15 @@ from typing import Literal -from pydantic import BaseModel, Field, root_validator, validator +from pydantic import field_validator, model_validator, ConfigDict, BaseModel, Field from .httpx_calls_capture_errors import OpenApiSpecError class CapturedParameterSchema(BaseModel): - title: str | None - type_: Literal["str", "int", "float", "bool"] | None = Field( - None, alias="type", optional=True - ) + title: str | None = None + type_: Literal["str", "int", "float", "bool"] | None = Field(None, alias="type") pattern: str | None - format_: Literal["uuid"] | None = Field(None, alias="format", optional=True) + format_: Literal["uuid"] | None = Field(None, alias="format") exclusiveMinimum: bool | None minimum: int | None anyOf: list["CapturedParameterSchema"] | None @@ -22,7 +20,7 @@ class Config: validate_always = True allow_population_by_field_name = True - @validator("type_", pre=True) + @field_validator("type_", mode="before") @classmethod def preprocess_type_(cls, val): if val == "string": @@ -33,7 +31,7 @@ def preprocess_type_(cls, val): val = "bool" return val - @root_validator(pre=False) + @model_validator(mode="after") @classmethod def check_compatibility(cls, values): type_ = values.get("type_") @@ -100,10 +98,7 @@ class CapturedParameter(BaseModel): response_value: str | None = ( None # attribute for storing the params value in a concrete response ) - - class Config: - validate_always = True - allow_population_by_field_name = True + model_config = ConfigDict(validate_default=True, populate_by_name=True) def __hash__(self): return hash( diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/httpx_client_base_dev.py b/packages/pytest-simcore/src/pytest_simcore/helpers/httpx_client_base_dev.py index d9b5bb64437..9a36d4cc020 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/httpx_client_base_dev.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/httpx_client_base_dev.py @@ -6,7 +6,7 @@ from fastapi.encoders import jsonable_encoder from httpx._types import URLTypes from jsonschema import ValidationError -from pydantic import parse_file_as +from pydantic import TypeAdapter from .httpx_calls_capture_errors import CaptureProcessingError from .httpx_calls_capture_models import HttpApiCallCaptureModel, get_captured_model @@ -14,6 +14,11 @@ _logger = logging.getLogger(__name__) +_HTTP_API_CALL_CAPTURE_MODEL_ADAPTER: TypeAdapter[ + list[HttpApiCallCaptureModel] +] = TypeAdapter(list[HttpApiCallCaptureModel]) + + class AsyncClientCaptureWrapper(httpx.AsyncClient): """ Adds captures mechanism @@ -41,8 +46,11 @@ async def request(self, method: str, url: URLTypes, **kwargs): or self._capture_file.read_text().strip() == "" ): self._capture_file.write_text("[]") - serialized_captures: list[HttpApiCallCaptureModel] = parse_file_as( - list[HttpApiCallCaptureModel], self._capture_file + + serialized_captures: list[ + HttpApiCallCaptureModel + ] = _HTTP_API_CALL_CAPTURE_MODEL_ADAPTER.validate_json( + self._capture_file.read_text() ) serialized_captures.append(capture) self._capture_file.write_text( diff --git 
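
The validator decorators rename as well: `@validator(..., pre=True)` becomes `@field_validator(..., mode="before")` and `@root_validator` becomes `@model_validator`. A sketch of both (illustrative model):

```python
from pydantic import BaseModel, field_validator, model_validator


class Schema(BaseModel):
    type_: str | None = None

    @field_validator("type_", mode="before")  # v1: @validator("type_", pre=True)
    @classmethod
    def normalize(cls, v):
        return {"string": "str", "integer": "int"}.get(v, v)

    @model_validator(mode="after")  # v1: @root_validator(pre=False)
    def check(self) -> "Schema":
        # mode="after" receives the constructed instance, not a values dict
        if self.type_ not in (None, "str", "int"):
            raise ValueError(f"unsupported type {self.type_!r}")
        return self


assert Schema(type_="string").type_ == "str"
```

One caution on the hunk above: since a `mode="after"` validator receives the model instance rather than a values dict, the migrated `check_compatibility` that keeps `@classmethod` and `values.get(...)` likely needs switching to attribute access.
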
a/packages/pytest-simcore/src/pytest_simcore/helpers/parametrizations.py b/packages/pytest-simcore/src/pytest_simcore/helpers/parametrizations.py index ed6381f5611..6eae044643b 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/parametrizations.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/parametrizations.py @@ -1,6 +1,6 @@ import pytest from _pytest.mark.structures import ParameterSet -from pydantic import ByteSize, parse_obj_as +from pydantic import ByteSize, TypeAdapter def byte_size_ids(val) -> str | None: @@ -10,4 +10,4 @@ def byte_size_ids(val) -> str | None: def parametrized_file_size(size_str: str) -> ParameterSet: - return pytest.param(parse_obj_as(ByteSize, size_str), id=size_str) + return pytest.param(TypeAdapter(ByteSize).validate_python(size_str), id=size_str) diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/s3.py b/packages/pytest-simcore/src/pytest_simcore/helpers/s3.py index 5d7e721a832..2f0a03b575d 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/s3.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/s3.py @@ -8,14 +8,14 @@ from aiohttp import ClientSession from aws_library.s3 import MultiPartUploadLinks from models_library.api_schemas_storage import ETag, FileUploadSchema, UploadedPart -from pydantic import AnyUrl, ByteSize, parse_obj_as +from pydantic import AnyUrl, ByteSize, TypeAdapter from servicelib.aiohttp import status from servicelib.utils import limited_as_completed, logged_gather from types_aiobotocore_s3 import S3Client from .logging_tools import log_context -_SENDER_CHUNK_SIZE: Final[int] = parse_obj_as(ByteSize, "16Mib") +_SENDER_CHUNK_SIZE: Final[int] = TypeAdapter(ByteSize).validate_python("16Mib") async def _file_sender( @@ -51,7 +51,7 @@ async def upload_file_part( f"--> uploading {this_file_chunk_size=} of {file=}, [{part_index+1}/{num_parts}]..." 
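
`ByteSize` parsing moves to `TypeAdapter` too; the adapter accepts human-readable sizes and normalizes them to an integer byte count:

```python
from pydantic import ByteSize, TypeAdapter

size = TypeAdapter(ByteSize).validate_python("16MiB")  # v1: parse_obj_as(ByteSize, "16Mib")
assert int(size) == 16 * 1024**2
print(size.human_readable())  # e.g. "16.0MiB"
```
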
) response = await session.put( - upload_url, + str(upload_url), data=_file_sender( file, offset=file_offset, diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_projects.py b/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_projects.py index b6687e22239..a190fa6900e 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_projects.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_projects.py @@ -78,9 +78,9 @@ async def create_project( project_nodes={ NodeID(node_id): ProjectNodeCreate( node_id=NodeID(node_id), - required_resources=ServiceResourcesDictHelpers.Config.schema_extra[ - "examples" - ][0], + required_resources=ServiceResourcesDictHelpers.model_config[ + "json_schema_extra" + ]["examples"][0], ) for node_id in project_data.get("workbench", {}) }, diff --git a/packages/pytest-simcore/src/pytest_simcore/httpbin_service.py b/packages/pytest-simcore/src/pytest_simcore/httpbin_service.py index b6c0a5aad3b..6bc71929eb3 100644 --- a/packages/pytest-simcore/src/pytest_simcore/httpbin_service.py +++ b/packages/pytest-simcore/src/pytest_simcore/httpbin_service.py @@ -14,7 +14,7 @@ import requests import requests.exceptions from docker.errors import APIError -from pydantic import HttpUrl, parse_obj_as +from pydantic import HttpUrl, TypeAdapter from tenacity import retry from tenacity.after import after_log from tenacity.retry import retry_if_exception_type @@ -56,7 +56,7 @@ def _wait_until_httpbin_is_responsive(): _wait_until_httpbin_is_responsive() - yield parse_obj_as(HttpUrl, base_url) + yield TypeAdapter(HttpUrl).validate_python(base_url) finally: with suppress(APIError): diff --git a/packages/pytest-simcore/src/pytest_simcore/httpx_calls_capture.py b/packages/pytest-simcore/src/pytest_simcore/httpx_calls_capture.py index 5c8df1ff6c5..d8cd056c115 100644 --- a/packages/pytest-simcore/src/pytest_simcore/httpx_calls_capture.py +++ b/packages/pytest-simcore/src/pytest_simcore/httpx_calls_capture.py @@ -38,7 +38,7 @@ import pytest import respx import yaml -from pydantic import parse_obj_as +from pydantic import TypeAdapter from pytest_mock import MockerFixture, MockType from pytest_simcore.helpers.docker import get_service_published_port from pytest_simcore.helpers.host import get_localhost_ip @@ -213,9 +213,9 @@ def _( assert capture_path.suffix == ".json" if services_mocks_enabled: - captures: list[HttpApiCallCaptureModel] = parse_obj_as( - list[HttpApiCallCaptureModel], json.loads(capture_path.read_text()) - ) + captures: list[HttpApiCallCaptureModel] = TypeAdapter( + list[HttpApiCallCaptureModel] + ).validate_python(json.loads(capture_path.read_text())) if len(side_effects_callbacks) > 0: assert len(side_effects_callbacks) == len(captures) diff --git a/packages/pytest-simcore/src/pytest_simcore/minio_service.py b/packages/pytest-simcore/src/pytest_simcore/minio_service.py index 46cee6fbeeb..38b9d2bdf8d 100644 --- a/packages/pytest-simcore/src/pytest_simcore/minio_service.py +++ b/packages/pytest-simcore/src/pytest_simcore/minio_service.py @@ -31,5 +31,5 @@ def minio_s3_settings_envs( minio_s3_settings: S3Settings, monkeypatch: pytest.MonkeyPatch, ) -> EnvVarsDict: - changed_envs: EnvVarsDict = minio_s3_settings.dict(exclude_unset=True) + changed_envs: EnvVarsDict = minio_s3_settings.model_dump(exclude_unset=True) return setenvs_from_dict(monkeypatch, changed_envs) diff --git a/packages/pytest-simcore/src/pytest_simcore/pydantic_models.py b/packages/pytest-simcore/src/pytest_simcore/pydantic_models.py index 
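
The `str(upload_url)` change above is needed because v2's URL types no longer subclass `str`, so HTTP clients that type-check their arguments need an explicit conversion. A sketch of the behavior:

```python
from pydantic import AnyUrl, TypeAdapter

url = TypeAdapter(AnyUrl).validate_python("https://example.com/upload")

assert not isinstance(url, str)  # v1 AnyUrl was a str subclass; v2's is not
assert str(url) == "https://example.com/upload"
```
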
297e9a9ab13..04d285a601e 100644 --- a/packages/pytest-simcore/src/pytest_simcore/pydantic_models.py +++ b/packages/pytest-simcore/src/pytest_simcore/pydantic_models.py @@ -51,7 +51,7 @@ def walk_model_examples_in_package(package: ModuleType) -> Iterator[ModelExample def iter_model_examples_in_module(module: object) -> Iterator[ModelExample]: - """Iterates on all examples defined as BaseModelClass.Config.schema_extra["example"] + """Iterates on all examples defined as BaseModelClass.model_config["json_schema_extra"]["example"] Usage: @@ -64,7 +64,7 @@ def test_model_examples( model_cls: type[BaseModel], example_name: int, example_data: Any ): print(example_name, ":", json.dumps(example_data)) - assert model_cls.parse_obj(example_data) + assert model_cls.model_validate(example_data) """ def _is_model_cls(obj) -> bool: diff --git a/packages/pytest-simcore/src/pytest_simcore/rabbit_service.py b/packages/pytest-simcore/src/pytest_simcore/rabbit_service.py index 47188400e79..240e0100648 100644 --- a/packages/pytest-simcore/src/pytest_simcore/rabbit_service.py +++ b/packages/pytest-simcore/src/pytest_simcore/rabbit_service.py @@ -56,7 +56,7 @@ def rabbit_env_vars_dict( async def rabbit_settings(rabbit_env_vars_dict: EnvVarsDict) -> RabbitSettings: """Returns the settings of a rabbit service that is up and responsive""" - settings = RabbitSettings.parse_obj(rabbit_env_vars_dict) + settings = RabbitSettings.model_validate(rabbit_env_vars_dict) await wait_till_rabbit_responsive(settings.dsn) return settings diff --git a/packages/pytest-simcore/src/pytest_simcore/services_api_mocks_for_aiohttp_clients.py b/packages/pytest-simcore/src/pytest_simcore/services_api_mocks_for_aiohttp_clients.py index dffe3883c61..429783e7061 100644 --- a/packages/pytest-simcore/src/pytest_simcore/services_api_mocks_for_aiohttp_clients.py +++ b/packages/pytest-simcore/src/pytest_simcore/services_api_mocks_for_aiohttp_clients.py @@ -28,7 +28,7 @@ from models_library.projects_pipeline import ComputationTask from models_library.projects_state import RunningState from models_library.utils.fastapi_encoders import jsonable_encoder -from pydantic import AnyUrl, ByteSize, parse_obj_as +from pydantic import AnyUrl, ByteSize, TypeAdapter from servicelib.aiohttp import status from yarl import URL @@ -107,8 +107,8 @@ def create_computation_cb(url, **kwargs) -> CallbackResult: "62237c33-8d6c-4709-aa92-c3cf693dd6d2", ], } - returned_computation = ComputationTask.parse_obj( - ComputationTask.Config.schema_extra["examples"][0] + returned_computation = ComputationTask.model_validate( + ComputationTask.model_config["json_schema_extra"]["examples"][0] ).copy( update={ "id": f"{kwargs['json']['project_id']}", @@ -131,8 +131,8 @@ def get_computation_cb(url, **kwargs) -> CallbackResult: state = RunningState.NOT_STARTED pipeline: dict[str, list[str]] = FULL_PROJECT_PIPELINE_ADJACENCY node_states = FULL_PROJECT_NODE_STATES - returned_computation = ComputationTask.parse_obj( - ComputationTask.Config.schema_extra["examples"][0] + returned_computation = ComputationTask.model_validate( + ComputationTask.model_config["json_schema_extra"]["examples"][0] ).copy( update={ "id": Path(url.path).name, @@ -154,11 +154,11 @@ def get_computation_cb(url, **kwargs) -> CallbackResult: def create_cluster_cb(url, **kwargs) -> CallbackResult: assert "json" in kwargs, f"missing body in call to {url}" assert url.query.get("user_id") - random_cluster = Cluster.parse_obj( - random.choice(Cluster.Config.schema_extra["examples"]) + random_cluster = Cluster.model_validate( 
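These cluster callbacks re-parse randomly chosen example payloads, and the PR moves those examples from `Config.schema_extra["examples"]` to `model_config["json_schema_extra"]["examples"]`. A self-contained sketch of the pattern under a hypothetical model (`ExampleCluster` stands in for the real `Cluster` from models_library):

```python
# Examples declared via ConfigDict are plain dict entries on model_config,
# so mocks and tests can read them back directly in pydantic v2.
import random

from pydantic import BaseModel, ConfigDict


class ExampleCluster(BaseModel):  # hypothetical stand-in
    model_config = ConfigDict(
        json_schema_extra={"examples": [{"name": "c1"}, {"name": "c2"}]}
    )
    name: str


# v1: Cluster.parse_obj(random.choice(Cluster.Config.schema_extra["examples"]))
example = random.choice(ExampleCluster.model_config["json_schema_extra"]["examples"])
cluster = ExampleCluster.model_validate(example)
assert cluster.name in {"c1", "c2"}
```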
+ random.choice(Cluster.model_config["json_schema_extra"]["examples"]) ) return CallbackResult( - status=201, payload=json.loads(random_cluster.json(by_alias=True)) + status=201, payload=json.loads(random_cluster.model_dump_json(by_alias=True)) ) @@ -169,9 +169,11 @@ def list_clusters_cb(url, **kwargs) -> CallbackResult: body=json.dumps( [ json.loads( - Cluster.parse_obj( - random.choice(Cluster.Config.schema_extra["examples"]) - ).json(by_alias=True) + Cluster.model_validate( + random.choice( + Cluster.model_config["json_schema_extra"]["examples"] + ) + ).model_dump_json(by_alias=True) ) for _ in range(3) ] @@ -185,12 +187,14 @@ def get_cluster_cb(url, **kwargs) -> CallbackResult: return CallbackResult( status=200, payload=json.loads( - Cluster.parse_obj( + Cluster.model_validate( { - **random.choice(Cluster.Config.schema_extra["examples"]), + **random.choice( + Cluster.model_config["json_schema_extra"]["examples"] + ), **{"id": cluster_id}, } - ).json(by_alias=True) + ).model_dump_json(by_alias=True) ), ) @@ -214,12 +218,14 @@ def patch_cluster_cb(url, **kwargs) -> CallbackResult: return CallbackResult( status=200, payload=json.loads( - Cluster.parse_obj( + Cluster.model_validate( { - **random.choice(Cluster.Config.schema_extra["examples"]), + **random.choice( + Cluster.model_config["json_schema_extra"]["examples"] + ), **{"id": cluster_id}, } - ).json(by_alias=True) + ).model_dump_json(by_alias=True) ), ) @@ -366,11 +372,13 @@ def get_upload_link_cb(url: URL, **kwargs) -> CallbackResult: if (file_size := kwargs["params"].get("file_size")) is not None: assert file_size upload_schema = FileUploadSchema( - chunk_size=parse_obj_as(ByteSize, "5GiB"), - urls=[parse_obj_as(AnyUrl, f"{scheme[link_type]}://{file_id}")], + chunk_size=TypeAdapter(ByteSize).validate_python("5GiB"), + urls=[ + TypeAdapter(AnyUrl).validate_python(f"{scheme[link_type]}://{file_id}") + ], links=FileUploadLinks( - abort_upload=parse_obj_as(AnyUrl, f"{url}:abort"), - complete_upload=parse_obj_as(AnyUrl, f"{url}:complete"), + abort_upload=TypeAdapter(AnyUrl).validate_python(f"{url}:abort"), + complete_upload=TypeAdapter(AnyUrl).validate_python(f"{url}:complete"), ), ) return CallbackResult( @@ -379,7 +387,7 @@ def get_upload_link_cb(url: URL, **kwargs) -> CallbackResult: # version 1 returns a presigned link presigned_link = PresignedLink( - link=parse_obj_as(AnyUrl, f"{scheme[link_type]}://{file_id}") + link=TypeAdapter(AnyUrl).validate_python(f"{scheme[link_type]}://{file_id}") ) return CallbackResult( status=status.HTTP_200_OK, @@ -436,7 +444,9 @@ async def storage_v0_service_mock( aioresponses_mocker.get( get_file_metadata_pattern, status=status.HTTP_200_OK, - payload={"data": FileMetaDataGet.Config.schema_extra["examples"][0]}, + payload={ + "data": FileMetaDataGet.model_config["json_schema_extra"]["examples"][0] + }, repeat=True, ) aioresponses_mocker.get( @@ -465,8 +475,9 @@ def generate_future_link(url, **kwargs): (parsed_url.scheme, parsed_url.netloc, parsed_url.path, "", "", "") ) - payload: FileUploadCompleteResponse = parse_obj_as( - FileUploadCompleteResponse, + payload: FileUploadCompleteResponse = TypeAdapter( + FileUploadCompleteResponse + ).validate_python( { "links": { "state": stripped_url + ":complete/futures/" + str(faker.uuid4()) diff --git a/packages/pytest-simcore/src/pytest_simcore/simcore_storage_service.py b/packages/pytest-simcore/src/pytest_simcore/simcore_storage_service.py index 9628d1058c9..e2f7654d3d0 100644 --- a/packages/pytest-simcore/src/pytest_simcore/simcore_storage_service.py +++
b/packages/pytest-simcore/src/pytest_simcore/simcore_storage_service.py @@ -10,7 +10,7 @@ import tenacity from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID, SimcoreS3FileID -from pydantic import AnyUrl, parse_obj_as +from pydantic import AnyUrl, TypeAdapter from pytest_mock import MockerFixture from servicelib.minio_utils import ServiceRetryPolicyUponInitialization from yarl import URL @@ -82,6 +82,8 @@ def create_simcore_file_id() -> Callable[[ProjectID, NodeID, str], SimcoreS3File def _creator( project_id: ProjectID, node_id: NodeID, file_name: str ) -> SimcoreS3FileID: - return parse_obj_as(SimcoreS3FileID, f"{project_id}/{node_id}/{file_name}") + return TypeAdapter(SimcoreS3FileID).validate_python( + f"{project_id}/{node_id}/{file_name}" + ) return _creator diff --git a/packages/service-integration/requirements/_base.txt b/packages/service-integration/requirements/_base.txt index 904f97c614f..bbd97171063 100644 --- a/packages/service-integration/requirements/_base.txt +++ b/packages/service-integration/requirements/_base.txt @@ -1,3 +1,5 @@ +annotated-types==0.7.0 + # via pydantic arrow==1.3.0 # via # -r requirements/../../../packages/models-library/requirements/_base.in @@ -31,8 +33,6 @@ docker==7.1.0 # via -r requirements/_base.in email-validator==2.2.0 # via pydantic -exceptiongroup==1.2.2 - # via pytest idna==3.7 # via # email-validator @@ -68,17 +68,27 @@ packaging==24.1 # via pytest pluggy==1.5.0 # via pytest -pydantic==1.10.17 +pydantic==2.9.2 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/models-library/requirements/_base.in + # pydantic-extra-types + # pydantic-settings +pydantic-core==2.23.4 + # via pydantic +pydantic-extra-types==2.9.0 + # via -r requirements/../../../packages/models-library/requirements/_base.in +pydantic-settings==2.5.2 + # via -r requirements/../../../packages/models-library/requirements/_base.in pygments==2.18.0 # via rich pytest==8.3.2 # via -r requirements/_base.in python-dateutil==2.9.0.post0 # via arrow +python-dotenv==1.0.1 + # via pydantic-settings python-slugify==8.0.4 # via cookiecutter pyyaml==6.0.2 @@ -109,8 +119,6 @@ six==1.16.0 # via python-dateutil text-unidecode==1.3 # via python-slugify -tomli==2.0.1 - # via pytest typer==0.12.4 # via -r requirements/_base.in types-python-dateutil==2.9.0.20240821 @@ -118,6 +126,7 @@ types-python-dateutil==2.9.0.20240821 typing-extensions==4.12.2 # via # pydantic + # pydantic-core # typer urllib3==2.2.2 # via diff --git a/packages/service-integration/requirements/_test.txt b/packages/service-integration/requirements/_test.txt index 925a176d40c..fa704698091 100644 --- a/packages/service-integration/requirements/_test.txt +++ b/packages/service-integration/requirements/_test.txt @@ -6,10 +6,6 @@ coverage==7.6.1 # via # -r requirements/_test.in # pytest-cov -exceptiongroup==1.2.2 - # via - # -c requirements/_base.txt - # pytest iniconfig==2.0.0 # via # -c requirements/_base.txt @@ -48,11 +44,6 @@ rpds-py==0.20.0 # referencing termcolor==2.4.0 # via pytest-sugar -tomli==2.0.1 - # via - # -c requirements/_base.txt - # coverage - # pytest types-docker==7.1.0.20240821 # via -r requirements/_test.in types-jsonschema==4.23.0.20240813 diff --git a/packages/service-integration/requirements/_tools.txt b/packages/service-integration/requirements/_tools.txt index f76d26f846f..354746f70e4 100644 --- 
a/packages/service-integration/requirements/_tools.txt +++ b/packages/service-integration/requirements/_tools.txt @@ -69,22 +69,11 @@ ruff==0.6.1 # via -r requirements/../../../requirements/devenv.txt setuptools==73.0.1 # via pip-tools -tomli==2.0.1 - # via - # -c requirements/_base.txt - # -c requirements/_test.txt - # black - # build - # mypy - # pip-tools - # pylint tomlkit==0.13.2 # via pylint typing-extensions==4.12.2 # via # -c requirements/_base.txt - # astroid - # black # mypy virtualenv==20.26.3 # via pre-commit diff --git a/packages/service-integration/src/service_integration/_compose_spec_model_autogenerated.py b/packages/service-integration/src/service_integration/_compose_spec_model_autogenerated.py index a390a469a41..a0a5f295402 100644 --- a/packages/service-integration/src/service_integration/_compose_spec_model_autogenerated.py +++ b/packages/service-integration/src/service_integration/_compose_spec_model_autogenerated.py @@ -5,9 +5,10 @@ # type:ignore from enum import Enum -from typing import Any +from typing import Any, TypeAlias -from pydantic import BaseModel, ConstrainedInt, Extra, Field, conint, constr +from pydantic import BaseModel, ConfigDict, Field, RootModel, StringConstraints +from typing_extensions import Annotated # MODIFICATIONS ------------------------------------------------------------------------- # @@ -19,17 +20,14 @@ # UserWarning: format of 'subnet_ip_address' not understood for 'string' - using default # port number range -class PortInt(ConstrainedInt): - gt = 0 - lt = 65535 +PortInt: TypeAlias = Annotated[int, Field(gt=0, lt=65535)] # ---------------------------------------------------------------------------------------- class Configuration(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") source: str | None = None target: str | None = None @@ -39,8 +37,7 @@ class Config: class CredentialSpec(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") config: str | None = None file: str | None = None @@ -54,31 +51,29 @@ class Condition(Enum): class DependsOn(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") condition: Condition class Extend(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") service: str file: str | None = None class Logging(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") driver: str | None = None - options: dict[constr(regex=r"^.+$"), str | float | None] | None = None + options: dict[ + Annotated[str, StringConstraints(pattern=r"^.+$")], str | float | None + ] | None = None class Port(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") mode: str | None = None host_ip: str | None = None @@ -96,8 +91,7 @@ class PullPolicy(Enum): class Secret1(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") source: str | None = None target: str | None = None @@ -107,38 +101,33 @@ class Config: class Ulimit(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") hard: int soft: int class Bind(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") propagation: str | None = None create_host_path: bool | None = None class Volume2(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") nocopy: bool | None = None class Tmpfs(BaseModel): - 
class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") - size: conint(ge=0) | str | None = None + size: Annotated[int, Field(ge=0)] | str | None = None class Volume1(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") type: str source: str | None = None @@ -151,8 +140,7 @@ class Config: class Healthcheck(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") disable: bool | None = None interval: str | None = None @@ -168,8 +156,7 @@ class Order(Enum): class RollbackConfig(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") parallelism: int | None = None delay: str | None = None @@ -185,8 +172,7 @@ class Order1(Enum): class UpdateConfig(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") parallelism: int | None = None delay: str | None = None @@ -197,16 +183,14 @@ class Config: class Limits(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") cpus: float | str | None = None memory: str | None = None class RestartPolicy(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") condition: str | None = None delay: str | None = None @@ -215,15 +199,13 @@ class Config: class Preference(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") spread: str | None = None class Placement(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") constraints: list[str] | None = None preferences: list[Preference] | None = None @@ -231,53 +213,49 @@ class Config: class DiscreteResourceSpec(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") kind: str | None = None value: float | None = None class GenericResource(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") discrete_resource_spec: DiscreteResourceSpec | None = None -class GenericResources(BaseModel): - __root__: list[GenericResource] +class GenericResources(RootModel): + root: list[GenericResource] class ConfigItem(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") subnet: str | None = None ip_range: str | None = None gateway: str | None = None - aux_addresses: dict[constr(regex=r"^.+$"), str] | None = None + aux_addresses: dict[ + Annotated[str, StringConstraints(pattern=r"^.+$")], str + ] | None = None class Ipam(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") driver: str | None = None config: list[ConfigItem] | None = None - options: dict[constr(regex=r"^.+$"), str] | None = None + options: dict[Annotated[str, StringConstraints(pattern=r"^.+$")], str] | None = None class External(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") name: str | None = None class External1(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") name: str | None = None @@ -290,37 +268,39 @@ class External3(BaseModel): name: str | None = None -class ListOfStrings(BaseModel): - __root__: list[str] +class ListOfStrings(RootModel): + root: list[str] -class ListOrDict(BaseModel): - __root__: (dict[constr(regex=r".+"), str | float | bool | None] | list[str]) +class ListOrDict(RootModel): + root: ( + dict[ + Annotated[str, StringConstraints(pattern=r".+")], str | float | bool 
| None + ] + | list[str] + ) class BlkioLimit(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") path: str | None = None rate: int | str | None = None class BlkioWeight(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") path: str | None = None weight: int | None = None -class Constraints(BaseModel): - __root__: Any +class Constraints(RootModel): + root: Any = None class BuildItem(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") context: str | None = None dockerfile: str | None = None @@ -335,8 +315,7 @@ class Config: class BlkioConfig(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") device_read_bps: list[BlkioLimit] | None = None device_read_iops: list[BlkioLimit] | None = None @@ -347,8 +326,7 @@ class Config: class Network1(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") aliases: ListOfStrings | None = None ipv4_address: str | None = None @@ -358,8 +336,7 @@ class Config: class Device(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") capabilities: ListOfStrings | None = None count: str | int | None = None @@ -368,17 +345,18 @@ class Config: options: ListOrDict | None = None -class Devices(BaseModel): - __root__: list[Device] +class Devices(RootModel): + root: list[Device] class Network(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") name: str | None = None driver: str | None = None - driver_opts: dict[constr(regex=r"^.+$"), str | float] | None = None + driver_opts: dict[ + Annotated[str, StringConstraints(pattern=r"^.+$")], str | float + ] | None = None ipam: Ipam | None = None external: External | None = None internal: bool | None = None @@ -388,32 +366,33 @@ class Config: class Volume(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") name: str | None = None driver: str | None = None - driver_opts: dict[constr(regex=r"^.+$"), str | float] | None = None + driver_opts: dict[ + Annotated[str, StringConstraints(pattern=r"^.+$")], str | float + ] | None = None external: External1 | None = None labels: ListOrDict | None = None class Secret(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") name: str | None = None file: str | None = None external: External2 | None = None labels: ListOrDict | None = None driver: str | None = None - driver_opts: dict[constr(regex=r"^.+$"), str | float] | None = None + driver_opts: dict[ + Annotated[str, StringConstraints(pattern=r"^.+$")], str | float + ] | None = None template_driver: str | None = None class ComposeSpecConfig(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") name: str | None = None file: str | None = None @@ -422,13 +401,12 @@ class Config: template_driver: str | None = None -class StringOrList(BaseModel): - __root__: str | ListOfStrings +class StringOrList(RootModel): + root: str | ListOfStrings class Reservations(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") cpus: float | str | None = None memory: str | None = None @@ -437,16 +415,14 @@ class Config: class Resources(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") limits: Limits | None = None reservations: Reservations | None = None class 
Deployment(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") mode: str | None = None endpoint_mode: str | None = None @@ -460,8 +436,7 @@ class Config: class Service(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") deploy: Deployment | None = None build: str | BuildItem | None = None @@ -472,8 +447,8 @@ class Config: command: str | list[str] | None = None configs: list[str | Configuration] | None = None container_name: str | None = None - cpu_count: conint(ge=0) | None = None - cpu_percent: conint(ge=0, le=100) | None = None + cpu_count: Annotated[int, Field(ge=0)] | None = None + cpu_percent: Annotated[int, Field(ge=0, le=100)] | None = None cpu_shares: float | str | None = None cpu_quota: float | str | None = None cpu_period: float | str | None = None @@ -483,7 +458,10 @@ class Config: cpuset: str | None = None credential_spec: CredentialSpec | None = None depends_on: None | ( - ListOfStrings | dict[constr(regex=r"^[a-zA-Z0-9._-]+$"), DependsOn] + ListOfStrings + | dict[ + Annotated[str, StringConstraints(pattern=r"^[a-zA-Z0-9._-]+$")], DependsOn + ] ) = None device_cgroup_rules: ListOfStrings | None = None devices: list[str] | None = None @@ -515,10 +493,14 @@ class Config: memswap_limit: float | str | None = None network_mode: str | None = None networks: None | ( - ListOfStrings | dict[constr(regex=r"^[a-zA-Z0-9._-]+$"), Network1 | None] + ListOfStrings + | dict[ + Annotated[str, StringConstraints(pattern=r"^[a-zA-Z0-9._-]+$")], + Network1 | None, + ] ) = None oom_kill_disable: bool | None = None - oom_score_adj: conint(ge=-1000, le=1000) | None = None + oom_score_adj: Annotated[int, Field(ge=-1000, le=1000)] | None = None pid: str | None = None pids_limit: float | str | None = None platform: str | None = None @@ -540,7 +522,9 @@ class Config: storage_opt: dict[str, Any] | None = None tmpfs: StringOrList | None = None tty: bool | None = None - ulimits: dict[constr(regex=r"^[a-z]+$"), int | Ulimit] | None = None + ulimits: dict[ + Annotated[str, StringConstraints(pattern=r"^[a-z]+$")], int | Ulimit + ] | None = None user: str | None = None userns_mode: str | None = None volumes: list[str | Volume1] | None = None @@ -553,15 +537,27 @@ class ComposeSpecification(BaseModel): The Compose file is a YAML file defining a multi-containers based application. """ - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") version: str | None = Field( None, description="Version of the Compose specification used. 
Tools not implementing required version MUST reject the configuration file.", ) - services: dict[constr(regex=r"^[a-zA-Z0-9._-]+$"), Service] | None = None - networks: dict[constr(regex=r"^[a-zA-Z0-9._-]+$"), Network] | None = None - volumes: dict[constr(regex=r"^[a-zA-Z0-9._-]+$"), Volume] | None = None - secrets: dict[constr(regex=r"^[a-zA-Z0-9._-]+$"), Secret] | None = None - configs: None | (dict[constr(regex=r"^[a-zA-Z0-9._-]+$"), ComposeSpecConfig]) = None + services: dict[ + Annotated[str, StringConstraints(pattern=r"^[a-zA-Z0-9._-]+$")], Service + ] | None = None + networks: dict[ + Annotated[str, StringConstraints(pattern=r"^[a-zA-Z0-9._-]+$")], Network + ] | None = None + volumes: dict[ + Annotated[str, StringConstraints(pattern=r"^[a-zA-Z0-9._-]+$")], Volume + ] | None = None + secrets: dict[ + Annotated[str, StringConstraints(pattern=r"^[a-zA-Z0-9._-]+$")], Secret + ] | None = None + configs: None | ( + dict[ + Annotated[str, StringConstraints(pattern=r"^[a-zA-Z0-9._-]+$")], + ComposeSpecConfig, + ] + ) = None diff --git a/packages/service-integration/src/service_integration/cli/__init__.py b/packages/service-integration/src/service_integration/cli/__init__.py index 7a1c058957e..a146de5735d 100644 --- a/packages/service-integration/src/service_integration/cli/__init__.py +++ b/packages/service-integration/src/service_integration/cli/__init__.py @@ -62,7 +62,7 @@ def main( overrides["COMPOSE_VERSION"] = compose_version # save states - ctx.settings = AppSettings.parse_obj(overrides) # type: ignore[attr-defined] # pylint:disable=no-member + ctx.settings = AppSettings.model_validate(overrides) # type: ignore[attr-defined] # pylint:disable=no-member # diff --git a/packages/service-integration/src/service_integration/cli/_compose_spec.py b/packages/service-integration/src/service_integration/cli/_compose_spec.py index 117a4afa5ef..f6d9b16be9e 100644 --- a/packages/service-integration/src/service_integration/cli/_compose_spec.py +++ b/packages/service-integration/src/service_integration/cli/_compose_spec.py @@ -196,7 +196,7 @@ def create_compose( for n, config_name in enumerate(configs_kwargs_map): nth_compose_spec = create_docker_compose_image_spec( settings, **configs_kwargs_map[config_name] - ).dict(exclude_unset=True) + ).model_dump(exclude_unset=True) if n == 0: compose_spec_dict = nth_compose_spec diff --git a/packages/service-integration/src/service_integration/cli/_config.py b/packages/service-integration/src/service_integration/cli/_config.py index 2f41dcb6f72..4437907efa0 100644 --- a/packages/service-integration/src/service_integration/cli/_config.py +++ b/packages/service-integration/src/service_integration/cli/_config.py @@ -25,7 +25,7 @@ def _get_labels_or_raise(build_labels) -> dict[str, str]: return dict(item.strip().split("=") for item in build_labels) if isinstance(build_labels, dict): return build_labels - if labels__root__ := build_labels.__root__: + if labels__root__ := build_labels.root: assert isinstance(labels__root__, dict) # nosec return labels__root__ raise InvalidLabelsError(build_labels=build_labels) @@ -39,7 +39,7 @@ def _create_config_from_compose_spec( ): rich.print(f"Creating osparc config files from {compose_spec_path}") - compose_spec = ComposeSpecification.parse_obj( + compose_spec = ComposeSpecification.model_validate( yaml.safe_load(compose_spec_path.read_text()) ) @@ -56,7 +56,7 @@ def _save(service_name: str, filename: Path, model: BaseModel): rich.print(f"Creating {output_path} ...", end="") with output_path.open("wt") as fh: - data = 
json.loads(model.json(by_alias=True, exclude_none=True)) + data = json.loads(model.model_dump_json(by_alias=True, exclude_none=True)) yaml.safe_dump(data, fh, sort_keys=False) rich.print("DONE") diff --git a/packages/service-integration/src/service_integration/errors.py b/packages/service-integration/src/service_integration/errors.py index 8d216b7d918..65521d36371 100644 --- a/packages/service-integration/src/service_integration/errors.py +++ b/packages/service-integration/src/service_integration/errors.py @@ -1,7 +1,7 @@ -from pydantic.errors import PydanticErrorMixin +from common_library.errors_classes import OsparcErrorMixin -class ServiceIntegrationError(PydanticErrorMixin, RuntimeError): +class ServiceIntegrationError(OsparcErrorMixin, RuntimeError): pass @@ -13,5 +13,5 @@ class UndefinedOciImageSpecError(ServiceIntegrationError): ... -class InvalidLabelsError(PydanticErrorMixin, ValueError): +class InvalidLabelsError(OsparcErrorMixin, ValueError): msg_template = "Invalid build labels {build_labels}" diff --git a/packages/service-integration/src/service_integration/oci_image_spec.py b/packages/service-integration/src/service_integration/oci_image_spec.py index e07a5e4cafc..3b9e45b46ab 100644 --- a/packages/service-integration/src/service_integration/oci_image_spec.py +++ b/packages/service-integration/src/service_integration/oci_image_spec.py @@ -11,8 +11,7 @@ from models_library.basic_types import SHA1Str, VersionStr from models_library.utils.labels_annotations import from_labels, to_labels -from pydantic import BaseModel, Field -from pydantic.config import Extra +from pydantic import BaseModel, ConfigDict, Field from pydantic.networks import AnyUrl # @@ -100,22 +99,20 @@ class OciImageSpecAnnotations(BaseModel): None, description="Digest of the image this image is based on (string)", ) - - class Config: - alias_generator = _underscore_as_dot - allow_population_by_field_name = True - extra = Extra.forbid + model_config = ConfigDict( + alias_generator=_underscore_as_dot, populate_by_name=True, extra="forbid" + ) @classmethod def from_labels_annotations( cls, labels: dict[str, str] ) -> "OciImageSpecAnnotations": data = from_labels(labels, prefix_key=OCI_LABEL_PREFIX, trim_key_head=False) - return cls.parse_obj(data) + return cls.model_validate(data) def to_labels_annotations(self) -> dict[str, str]: labels: dict[str, str] = to_labels( - self.dict(exclude_unset=True, by_alias=True, exclude_none=True), + self.model_dump(exclude_unset=True, by_alias=True, exclude_none=True), prefix_key=OCI_LABEL_PREFIX, ) return labels @@ -131,30 +128,30 @@ class LabelSchemaAnnotations(BaseModel): build_date: datetime vcs_ref: str vcs_url: AnyUrl - - class Config: - alias_generator = lambda field_name: field_name.replace("_", "-") - allow_population_by_field_name = True - extra = Extra.forbid + model_config = ConfigDict( + alias_generator=lambda field_name: field_name.replace("_", "-"), + populate_by_name=True, + extra="forbid", + ) @classmethod def create_from_env(cls) -> "LabelSchemaAnnotations": data = {} - for field_name in cls.__fields__: + for field_name in cls.model_fields: if value := os.environ.get(field_name.upper()): data[field_name] = value - return cls.parse_obj(data) + return cls.model_validate(data) def to_oci_data(self) -> dict[str, Any]: """Collects data that can be converted to OCI labels.
WARNING: label-schema has been deprecated in favor of OCI image specs """ - convertable_data = self.dict( + convertable_data = self.model_dump( include=set(_TO_OCI.keys()), exclude_unset=True, exclude_none=True ) assert set(convertable_data.keys()).issubset( # nosec - set(self.__fields__.keys()) + set(self.model_fields.keys()) ) # nosec return {_TO_OCI[key]: value for key, value in convertable_data.items()} diff --git a/packages/service-integration/src/service_integration/osparc_config.py b/packages/service-integration/src/service_integration/osparc_config.py index 1a340729e41..9382b98b447 100644 --- a/packages/service-integration/src/service_integration/osparc_config.py +++ b/packages/service-integration/src/service_integration/osparc_config.py @@ -36,9 +36,14 @@ from_labels, to_labels, ) -from pydantic import NonNegativeInt, ValidationError -from pydantic.class_validators import root_validator, validator -from pydantic.config import Extra +from pydantic import ( + ConfigDict, + NonNegativeInt, + ValidationError, + ValidationInfo, + field_validator, + model_validator, +) from pydantic.fields import Field from pydantic.main import BaseModel @@ -67,7 +72,7 @@ class DockerComposeOverwriteConfig(ComposeSpecification): def create_default( cls, service_name: str | None = None ) -> "DockerComposeOverwriteConfig": - model: "DockerComposeOverwriteConfig" = cls.parse_obj( + model: "DockerComposeOverwriteConfig" = cls.model_validate( { "services": { service_name: { @@ -84,7 +89,7 @@ def create_default( def from_yaml(cls, path: Path) -> "DockerComposeOverwriteConfig": with path.open() as fh: data = yaml_safe_load(fh) - model: "DockerComposeOverwriteConfig" = cls.parse_obj(data) + model: "DockerComposeOverwriteConfig" = cls.model_validate(data) return model @@ -101,11 +106,11 @@ class MetadataConfig(ServiceMetaDataPublished): exclude=True, ) - @validator("contact") + @field_validator("contact") @classmethod - def _check_contact_in_authors(cls, v, values): + def _check_contact_in_authors(cls, v, info: ValidationInfo): """catalog service relies on contact and author to define access rights""" - authors_emails = {author.email for author in values["authors"]} + authors_emails = {author.email for author in info.data["authors"]} if v not in authors_emails: msg = f"Contact {v} must be registered as an author" raise ValueError(msg) @@ -115,7 +120,7 @@ def _check_contact_in_authors(cls, v, values): def from_yaml(cls, path: Path) -> "MetadataConfig": with path.open() as fh: data = yaml_safe_load(fh) - model: "MetadataConfig" = cls.parse_obj(data) + model: "MetadataConfig" = cls.model_validate(data) return model @classmethod @@ -123,12 +128,12 @@ def from_labels_annotations(cls, labels: dict[str, str]) -> "MetadataConfig": data = from_labels( labels, prefix_key=OSPARC_LABEL_PREFIXES[0], trim_key_head=False ) - model: "MetadataConfig" = cls.parse_obj(data) + model: "MetadataConfig" = cls.model_validate(data) return model def to_labels_annotations(self) -> dict[str, str]: labels: dict[str, str] = to_labels( - self.dict(exclude_unset=True, by_alias=True, exclude_none=True), + self.model_dump(exclude_unset=True, by_alias=True, exclude_none=True), prefix_key=OSPARC_LABEL_PREFIXES[0], trim_key_head=False, ) @@ -175,7 +180,7 @@ class SettingsItem(BaseModel): description="The value of the service setting (shall follow Docker REST API scheme for services)", ) - @validator("type_", pre=True) + @field_validator("type_", mode="before") @classmethod def ensure_backwards_compatible_setting_type(cls, v): if v == "resources": @@
-183,18 +188,16 @@ def ensure_backwards_compatible_setting_type(cls, v): return "Resources" return v - @validator("value", pre=True) + @field_validator("value", mode="before") @classmethod - def check_value_against_custom_types(cls, v, values): - if (type_ := values.get("type_")) and type_ == "ContainerSpec": - ContainerSpec.parse_obj(v) + def check_value_against_custom_types(cls, v, info: ValidationInfo): + if (type_ := info.data.get("type_")) and type_ == "ContainerSpec": + ContainerSpec.model_validate(v) return v class ValidatingDynamicSidecarServiceLabels(DynamicSidecarServiceLabels): - class Config: - extra = Extra.allow - allow_population_by_field_name = True + model_config = ConfigDict(extra="allow", populate_by_name=True) def _underscore_as_minus(field_name: str) -> str: @@ -225,13 +228,13 @@ class RuntimeConfig(BaseModel): settings: list[SettingsItem] = Field(default_factory=list) - @root_validator(pre=True) + @model_validator(mode="before") @classmethod def ensure_compatibility(cls, v): # NOTE: if changes are applied to `DynamicSidecarServiceLabels` # these are also validated when ooil runs. try: - ValidatingDynamicSidecarServiceLabels.parse_obj(v) + ValidatingDynamicSidecarServiceLabels.model_validate(v) except ValidationError: _logger.exception( "Could not validate %s via %s", @@ -242,25 +245,26 @@ def ensure_compatibility(cls, v): return v - class Config: - alias_generator = _underscore_as_minus - allow_population_by_field_name = True - extra = Extra.forbid + model_config = ConfigDict( + alias_generator=_underscore_as_minus, + populate_by_name=True, + extra="forbid", + ) @classmethod def from_yaml(cls, path: Path) -> "RuntimeConfig": with path.open() as fh: data = yaml_safe_load(fh) - return cls.parse_obj(data) + return cls.model_validate(data) @classmethod def from_labels_annotations(cls, labels: dict[str, str]) -> "RuntimeConfig": data = from_labels(labels, prefix_key=OSPARC_LABEL_PREFIXES[1]) - return cls.parse_obj(data) + return cls.model_validate(data) def to_labels_annotations(self) -> dict[str, str]: labels: dict[str, str] = to_labels( - self.dict(exclude_unset=True, by_alias=True, exclude_none=True), + self.model_dump(exclude_unset=True, by_alias=True, exclude_none=True), prefix_key=OSPARC_LABEL_PREFIXES[1], ) return labels diff --git a/packages/service-integration/src/service_integration/osparc_image_specs.py b/packages/service-integration/src/service_integration/osparc_image_specs.py index df97e7c18b1..7f6dec6ca15 100644 --- a/packages/service-integration/src/service_integration/osparc_image_specs.py +++ b/packages/service-integration/src/service_integration/osparc_image_specs.py @@ -41,9 +41,9 @@ def create_image_spec( docker_compose_overwrite_cfg.services[service_name].build.labels = labels - overwrite_options = docker_compose_overwrite_cfg.services[service_name].build.dict( - exclude_none=True - ) + overwrite_options = docker_compose_overwrite_cfg.services[ + service_name + ].build.model_dump(exclude_none=True, serialize_as_any=True) build_spec = BuildItem(**overwrite_options) return ComposeSpecification( diff --git a/packages/service-integration/src/service_integration/settings.py b/packages/service-integration/src/service_integration/settings.py index 70c971c8db9..f8b977cc9a4 100644 --- a/packages/service-integration/src/service_integration/settings.py +++ b/packages/service-integration/src/service_integration/settings.py @@ -1,4 +1,5 @@ -from pydantic import BaseModel, BaseSettings, Field, SecretStr +from pydantic import BaseModel, Field, SecretStr +from 
pydantic_settings import BaseSettings, SettingsConfigDict class Registry(BaseModel): @@ -26,9 +27,9 @@ class AppSettings(BaseSettings): COMPOSE_VERSION: str = Field( "3.7", description="version of the docker-compose spec" ) - - class Config: - env_file_encoding = "utf-8" + model_config = SettingsConfigDict( + env_file_encoding="utf-8", + ) # TODO: load from ~/.osparc/service-integration.json or env file # TODO: add access to secrets diff --git a/packages/service-integration/src/service_integration/versioning.py b/packages/service-integration/src/service_integration/versioning.py index 3ed56868e50..0d7685a818f 100644 --- a/packages/service-integration/src/service_integration/versioning.py +++ b/packages/service-integration/src/service_integration/versioning.py @@ -1,15 +1,13 @@ -import re from datetime import datetime -from re import Pattern -from typing import Any, ClassVar +from typing import Annotated, TypeAlias from models_library.basic_regex import SEMANTIC_VERSION_RE_W_CAPTURE_GROUPS from packaging.version import Version -from pydantic import BaseModel, ConstrainedStr, Field +from pydantic import BaseModel, ConfigDict, Field, StringConstraints - -class SemanticVersionStr(ConstrainedStr): - regex: Pattern[str] | None = re.compile(SEMANTIC_VERSION_RE_W_CAPTURE_GROUPS) +SemanticVersionStr: TypeAlias = Annotated[ + str, StringConstraints(pattern=SEMANTIC_VERSION_RE_W_CAPTURE_GROUPS) +] def bump_version_string(current_version: str, bump: str) -> str: @@ -52,8 +50,8 @@ class ExecutableVersionInfo(BaseModel): version: SemanticVersionStr released: datetime - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "example": { "display_name": "SEMCAD X", "display_version": "Matterhorn Student Edition 1", @@ -63,6 +61,7 @@ class Config: "released": "2021-11-19T14:58:45.900979", } } + ) class ServiceVersionInfo(BaseModel): @@ -72,11 +71,12 @@ class ServiceVersionInfo(BaseModel): ) released: datetime = Field(..., description="Publication/release date") - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "example": { "version": "1.0.0", # e.g. 
first time released as an osparc "integration_version": "2.1.0", "released": "2021-11-19T14:58:45.900979", } } + ) diff --git a/packages/service-integration/tests/test_command_compose.py b/packages/service-integration/tests/test_command_compose.py index 371d8a9dbdc..50f8b5b67b4 100644 --- a/packages/service-integration/tests/test_command_compose.py +++ b/packages/service-integration/tests/test_command_compose.py @@ -39,7 +39,7 @@ def test_make_docker_compose_meta( assert target_compose_specs.exists() # valid compose specs - compose_cfg = ComposeSpecification.parse_obj( + compose_cfg = ComposeSpecification.model_validate( yaml.safe_load(target_compose_specs.read_text()) ) assert compose_cfg.services @@ -48,8 +48,8 @@ def test_make_docker_compose_meta( compose_labels = compose_cfg.services[metadata_cfg.service_name()].build.labels assert compose_labels - assert isinstance(compose_labels.__root__, dict) + assert isinstance(compose_labels.root, dict) assert ( - MetadataConfig.from_labels_annotations(compose_labels.__root__) == metadata_cfg + MetadataConfig.from_labels_annotations(compose_labels.root) == metadata_cfg ) diff --git a/packages/service-integration/tests/test_compose_spec_model.py b/packages/service-integration/tests/test_compose_spec_model.py index 63cd0924c99..416dfbb8eef 100644 --- a/packages/service-integration/tests/test_compose_spec_model.py +++ b/packages/service-integration/tests/test_compose_spec_model.py @@ -9,7 +9,7 @@ def test_autogenerated_compose_spec_model(tests_data_dir: Path): docker_compose_path = tests_data_dir / "docker-compose-meta.yml" # tests if parses valid file - compose_spec = ComposeSpecification.parse_obj( + compose_spec = ComposeSpecification.model_validate( yaml.safe_load(docker_compose_path.read_text()) ) diff --git a/packages/service-integration/tests/test_oci_image_spec.py b/packages/service-integration/tests/test_oci_image_spec.py index ef2bd8b47d9..641594c9966 100644 --- a/packages/service-integration/tests/test_oci_image_spec.py +++ b/packages/service-integration/tests/test_oci_image_spec.py @@ -18,7 +18,7 @@ def test_label_schema_to_oci_conversion(monkeypatch): lsa = LabelSchemaAnnotations.create_from_env() - OciImageSpecAnnotations.parse_obj(lsa.to_oci_data()) + OciImageSpecAnnotations.model_validate(lsa.to_oci_data()) def test_create_annotations_from_metadata(tests_data_dir: Path): diff --git a/packages/service-integration/tests/test_osparc_config.py b/packages/service-integration/tests/test_osparc_config.py index e993bc25392..9a5a8bd7a81 100644 --- a/packages/service-integration/tests/test_osparc_config.py +++ b/packages/service-integration/tests/test_osparc_config.py @@ -52,8 +52,8 @@ def test_load_from_labels( runtime_cfg = RuntimeConfig.from_labels_annotations(labels) assert runtime_cfg.callbacks_mapping is not None - print(meta_cfg.json(exclude_unset=True, indent=2)) - print(runtime_cfg.json(exclude_unset=True, indent=2)) + print(meta_cfg.model_dump_json(exclude_unset=True, indent=2)) + print(runtime_cfg.model_dump_json(exclude_unset=True, indent=2)) # create yamls from config for model in (runtime_cfg, meta_cfg): @@ -62,7 +62,7 @@ def test_load_from_labels( ) with open(config_path, "w") as fh: data = json.loads( - model.json(exclude_unset=True, by_alias=True, exclude_none=True) + model.model_dump_json(exclude_unset=True, by_alias=True, exclude_none=True) ) yaml.safe_dump(data, fh, sort_keys=False) @@ -72,7 +72,8 @@ def test_load_from_labels( @pytest.mark.parametrize( - "example_data", 
SimcoreServiceSettingLabelEntry.Config.schema_extra["examples"] + "example_data", + SimcoreServiceSettingLabelEntry.model_config["json_schema_extra"]["examples"], ) def test_settings_item_in_sync_with_service_settings_label( example_data: dict[str, Any] @@ -81,7 +82,7 @@ def test_settings_item_in_sync_with_service_settings_label( # First we parse with SimcoreServiceSettingLabelEntry since it also supports backwards compatibility # and will upgrade old version - example_model = SimcoreServiceSettingLabelEntry.parse_obj(example_data) + example_model = SimcoreServiceSettingLabelEntry.model_validate(example_data) # SettingsItem is exclusively for NEW labels, so it should not support backwards compatibility new_model = SettingsItem( @@ -91,4 +92,4 @@ def test_settings_item_in_sync_with_service_settings_label( ) # check back - SimcoreServiceSettingLabelEntry.parse_obj(new_model.dict(by_alias=True)) + SimcoreServiceSettingLabelEntry.model_validate(new_model.model_dump(by_alias=True)) diff --git a/packages/service-integration/tests/test_osparc_image_specs.py b/packages/service-integration/tests/test_osparc_image_specs.py index b482bc85a4c..6bec87425ad 100644 --- a/packages/service-integration/tests/test_osparc_image_specs.py +++ b/packages/service-integration/tests/test_osparc_image_specs.py @@ -58,8 +58,8 @@ def test_create_image_spec_impl(tests_data_dir: Path, settings: AppSettings): assert build_spec assert isinstance(build_spec, BaseModel) - print(build_spec.json(exclude_unset=True, indent=2)) - print(yaml.safe_dump(compose_spec.dict(exclude_unset=True), sort_keys=False)) + print(build_spec.model_dump_json(exclude_unset=True, indent=2)) + print(yaml.safe_dump(compose_spec.model_dump(exclude_unset=True), sort_keys=False)) def test_image_digest_is_not_a_label_annotation(tests_data_dir: Path): diff --git a/packages/service-integration/tests/test_osparc_runtime_specs.py b/packages/service-integration/tests/test_osparc_runtime_specs.py index 74d63e15e5b..153c85d27c4 100644 --- a/packages/service-integration/tests/test_osparc_runtime_specs.py +++ b/packages/service-integration/tests/test_osparc_runtime_specs.py @@ -17,8 +17,8 @@ def test_create_runtime_spec_impl(tests_data_dir: Path): osparc_spec: dict = yaml.safe_load((tests_data_dir / "runtime.yml").read_text()) - pm_spec1 = PathMappingsLabel.parse_obj(osparc_spec["paths-mapping"]) - pm_spec2 = PathMappingsLabel.parse_obj( + pm_spec1 = PathMappingsLabel.model_validate(osparc_spec["paths-mapping"]) + pm_spec2 = PathMappingsLabel.model_validate( { "outputs_path": "/outputs", "inputs_path": "/inputs", @@ -58,12 +58,12 @@ def test_create_runtime_spec_impl(tests_data_dir: Path): # FIXME: ensure all sources are different! (e.g. a/b/c and z/c have the same name!) 
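The test updates around here follow the v2 serialization renames: `.dict()` becomes `.model_dump()` and `.json()` becomes `.model_dump_json()`, with the same keyword arguments. A minimal sketch, assuming stock pydantic v2 (`VolumeSketch` is a hypothetical stand-in for the compose-spec models used in these tests):

```python
from pydantic import BaseModel


class VolumeSketch(BaseModel):  # hypothetical stand-in
    source: str | None = None
    target: str | None = None


v = VolumeSketch(source="/data")
# v1: v.dict(exclude_unset=True) / v.json(exclude_unset=True, indent=2)
assert v.model_dump(exclude_unset=True) == {"source": "/data"}
print(v.model_dump_json(exclude_unset=True, indent=2))
```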
- print(Service(volumes=volumes).json(exclude_unset=True, indent=2)) + print(Service(volumes=volumes).model_dump_json(exclude_unset=True, indent=2)) # TODO: _auto_map_to_service(osparc_spec["settings"]) data = {} for obj in osparc_spec["settings"]: - item = SettingsItem.parse_obj(obj) + item = SettingsItem.model_validate(obj) if item.name == "resources": # https://docs.docker.com/compose/compose-file/compose-file-v3/#resources @@ -87,7 +87,7 @@ def test_create_runtime_spec_impl(tests_data_dir: Path): else: raise AssertionError(item) - print(Service(**data).json(exclude_unset=True, indent=2)) + print(Service(**data).model_dump_json(exclude_unset=True, indent=2)) def test_compatibility(): diff --git a/packages/service-library/requirements/_aiohttp.txt b/packages/service-library/requirements/_aiohttp.txt index cebbcc2dd9f..f97b5b35ee8 100644 --- a/packages/service-library/requirements/_aiohttp.txt +++ b/packages/service-library/requirements/_aiohttp.txt @@ -14,9 +14,7 @@ aiosignal==1.3.1 aiozipkin==1.1.1 # via -r requirements/_aiohttp.in async-timeout==4.0.3 - # via - # aiohttp - # aiopg + # via aiopg attrs==24.2.0 # via # -r requirements/_aiohttp.in @@ -35,8 +33,6 @@ frozenlist==1.4.1 # via # aiohttp # aiosignal -greenlet==3.0.3 - # via sqlalchemy idna==3.7 # via # requests diff --git a/packages/service-library/requirements/_base.txt b/packages/service-library/requirements/_base.txt index 69daeedb073..6d0447e7c0e 100644 --- a/packages/service-library/requirements/_base.txt +++ b/packages/service-library/requirements/_base.txt @@ -18,6 +18,8 @@ aiormq==6.8.0 # via aio-pika aiosignal==1.3.1 # via aiohttp +annotated-types==0.7.0 + # via pydantic anyio==4.4.0 # via # fast-depends @@ -26,10 +28,6 @@ arrow==1.3.0 # via # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/_base.in -async-timeout==4.0.3 - # via - # aiohttp - # redis attrs==24.2.0 # via # aiohttp @@ -41,8 +39,6 @@ dnspython==2.6.1 # via email-validator email-validator==2.2.0 # via pydantic -exceptiongroup==1.2.2 - # via anyio fast-depends==2.4.8 # via faststream faststream==0.5.18 @@ -76,7 +72,7 @@ orjson==3.10.7 # -r requirements/../../../packages/models-library/requirements/_base.in pamqp==3.3.0 # via aiormq -pydantic==1.10.17 +pydantic==2.9.2 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt @@ -85,12 +81,24 @@ pydantic==1.10.17 # -r requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/_base.in # fast-depends + # pydantic-extra-types + # pydantic-settings +pydantic-core==2.23.4 + # via pydantic +pydantic-extra-types==2.9.0 + # via -r requirements/../../../packages/models-library/requirements/_base.in +pydantic-settings==2.5.2 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in pygments==2.18.0 # via rich pyinstrument==4.7.2 # via -r requirements/_base.in python-dateutil==2.9.0.post0 # via arrow +python-dotenv==1.0.1 + # via pydantic-settings pyyaml==6.0.2 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -137,9 +145,9 @@ types-python-dateutil==2.9.0.20240821 typing-extensions==4.12.2 # via # aiodebug - # anyio # faststream # pydantic + # pydantic-core # typer yarl==1.9.4 # via diff --git 
a/packages/service-library/requirements/_fastapi.txt b/packages/service-library/requirements/_fastapi.txt index 9a07c682d49..abaaac8e111 100644 --- a/packages/service-library/requirements/_fastapi.txt +++ b/packages/service-library/requirements/_fastapi.txt @@ -1,3 +1,5 @@ +annotated-types==0.7.0 + # via pydantic anyio==4.4.0 # via # httpx @@ -11,13 +13,8 @@ certifi==2024.7.4 # httpx click==8.1.7 # via uvicorn -exceptiongroup==1.2.2 - # via anyio -fastapi==0.99.1 +fastapi==0.115.0 # via - # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../requirements/constraints.txt # -r requirements/_fastapi.in # prometheus-fastapi-instrumentator h11==0.14.0 @@ -42,18 +39,19 @@ prometheus-client==0.20.0 # prometheus-fastapi-instrumentator prometheus-fastapi-instrumentator==6.1.0 # via -r requirements/_fastapi.in -pydantic==1.10.17 +pydantic==2.9.2 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/settings-library/requirements/_base.in # -c requirements/../../../requirements/constraints.txt # fastapi +pydantic-core==2.23.4 + # via pydantic sniffio==1.3.1 # via # anyio # httpx -starlette==0.27.0 +starlette==0.38.5 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt @@ -61,9 +59,8 @@ starlette==0.27.0 # fastapi typing-extensions==4.12.2 # via - # anyio # fastapi # pydantic - # uvicorn + # pydantic-core uvicorn==0.30.6 # via -r requirements/_fastapi.in diff --git a/packages/service-library/requirements/_test.txt b/packages/service-library/requirements/_test.txt index 789fdc81902..38282969f74 100644 --- a/packages/service-library/requirements/_test.txt +++ b/packages/service-library/requirements/_test.txt @@ -21,11 +21,6 @@ anyio==4.4.0 # httpx asgi-lifespan==2.1.0 # via -r requirements/_test.in -async-timeout==4.0.3 - # via - # -c requirements/_aiohttp.txt - # -c requirements/_base.txt - # aiohttp attrs==24.2.0 # via # -c requirements/_aiohttp.txt @@ -52,12 +47,6 @@ coverage==7.6.1 # pytest-cov docker==7.1.0 # via -r requirements/_test.in -exceptiongroup==1.2.2 - # via - # -c requirements/_base.txt - # -c requirements/_fastapi.txt - # anyio - # pytest execnet==2.1.1 # via pytest-xdist faker==27.0.0 @@ -70,10 +59,6 @@ frozenlist==1.4.1 # -c requirements/_base.txt # aiohttp # aiosignal -greenlet==3.0.3 - # via - # -c requirements/_aiohttp.txt - # sqlalchemy h11==0.14.0 # via # -c requirements/_fastapi.txt @@ -197,7 +182,9 @@ python-dateutil==2.9.0.post0 # -c requirements/_base.txt # faker python-dotenv==1.0.1 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in pyyaml==6.0.2 # via # -c requirements/../../../requirements/constraints.txt @@ -250,11 +237,6 @@ sqlalchemy2-stubs==0.0.2a38 # via sqlalchemy termcolor==2.4.0 # via pytest-sugar -tomli==2.0.1 - # via - # coverage - # mypy - # pytest types-aiofiles==24.1.0.20240626 # via -r requirements/_test.in types-psycopg2==2.9.21.20240819 @@ -265,7 +247,6 @@ typing-extensions==4.12.2 # via # -c requirements/_base.txt # -c requirements/_fastapi.txt - # anyio # mypy # sqlalchemy2-stubs 
urllib3==2.2.2 diff --git a/packages/service-library/requirements/_tools.txt b/packages/service-library/requirements/_tools.txt index 4695266d9c8..b54db6d8f5c 100644 --- a/packages/service-library/requirements/_tools.txt +++ b/packages/service-library/requirements/_tools.txt @@ -72,22 +72,12 @@ ruff==0.6.1 # via -r requirements/../../../requirements/devenv.txt setuptools==73.0.1 # via pip-tools -tomli==2.0.1 - # via - # -c requirements/_test.txt - # black - # build - # mypy - # pip-tools - # pylint tomlkit==0.13.2 # via pylint typing-extensions==4.12.2 # via # -c requirements/_base.txt # -c requirements/_test.txt - # astroid - # black # mypy virtualenv==20.26.3 # via pre-commit diff --git a/packages/service-library/src/servicelib/aiohttp/application_setup.py b/packages/service-library/src/servicelib/aiohttp/application_setup.py index 4fae3acc09f..4da40aa0182 100644 --- a/packages/service-library/src/servicelib/aiohttp/application_setup.py +++ b/packages/service-library/src/servicelib/aiohttp/application_setup.py @@ -8,7 +8,7 @@ import arrow from aiohttp import web -from pydantic import parse_obj_as +from pydantic import TypeAdapter from .application_keys import APP_CONFIG_KEY, APP_SETTINGS_KEY @@ -94,7 +94,9 @@ def _is_addon_enabled_from_config( for part in parts: if section and part == "enabled": # if section exists, no need to explicitly enable it - return parse_obj_as(bool, searched_config.get(part, True)) + return TypeAdapter(bool).validate_python( + searched_config.get(part, True) + ) searched_config = searched_config[part] except KeyError as ee: diff --git a/packages/service-library/src/servicelib/aiohttp/docker_utils.py b/packages/service-library/src/servicelib/aiohttp/docker_utils.py index 636b3492616..8e9393e1e69 100644 --- a/packages/service-library/src/servicelib/aiohttp/docker_utils.py +++ b/packages/service-library/src/servicelib/aiohttp/docker_utils.py @@ -2,7 +2,7 @@ import aiohttp from models_library.docker import DockerGenericTag -from pydantic import ValidationError, parse_obj_as +from pydantic import TypeAdapter, ValidationError from settings_library.docker_registry import RegistrySettings from yarl import URL @@ -68,9 +68,9 @@ async def retrieve_image_layer_information( # if the image has multiple architectures json_response = await response.json() try: - multi_arch_manifests = parse_obj_as( - DockerImageMultiArchManifestsV2, json_response - ) + multi_arch_manifests = TypeAdapter( + DockerImageMultiArchManifestsV2 + ).validate_python(json_response) # find the correct platform digest = "" for manifest in multi_arch_manifests.manifests: @@ -89,8 +89,12 @@ async def retrieve_image_layer_information( response.raise_for_status() assert response.status == status.HTTP_200_OK # nosec json_response = await response.json() - return parse_obj_as(DockerImageManifestsV2, json_response) + return TypeAdapter(DockerImageManifestsV2).validate_python( + json_response + ) except ValidationError: - return parse_obj_as(DockerImageManifestsV2, json_response) + return TypeAdapter(DockerImageManifestsV2).validate_python( + json_response + ) return None diff --git a/packages/service-library/src/servicelib/aiohttp/long_running_tasks/_server.py b/packages/service-library/src/servicelib/aiohttp/long_running_tasks/_server.py index df81371cbb8..bce905e9cd6 100644 --- a/packages/service-library/src/servicelib/aiohttp/long_running_tasks/_server.py +++ b/packages/service-library/src/servicelib/aiohttp/long_running_tasks/_server.py @@ -5,8 +5,9 @@ from typing import Any from aiohttp import web +from 
common_library.pydantic_type_adapters import AnyHttpUrlLegacyAdapter from models_library.utils.json_serialization import json_dumps -from pydantic import AnyHttpUrl, PositiveFloat +from pydantic import PositiveFloat from ...aiohttp import status from ...long_running_tasks._models import TaskGet @@ -67,17 +68,14 @@ async def start_long_running_task( ip_addr, port = request_.transport.get_extra_info( "sockname" ) # https://docs.python.org/3/library/asyncio-protocol.html#asyncio.BaseTransport.get_extra_info - status_url = AnyHttpUrl( - url=f"http://{ip_addr}:{port}{request_.app.router['get_task_status'].url_for(task_id=task_id)}", - scheme="http", + status_url = AnyHttpUrlLegacyAdapter.validate_python( + f"http://{ip_addr}:{port}{request_.app.router['get_task_status'].url_for(task_id=task_id)}" # NOSONAR ) - result_url = AnyHttpUrl( - url=f"http://{ip_addr}:{port}{request_.app.router['get_task_result'].url_for(task_id=task_id)}", - scheme="http", + result_url = AnyHttpUrlLegacyAdapter.validate_python( + f"http://{ip_addr}:{port}{request_.app.router['get_task_result'].url_for(task_id=task_id)}" # NOSONAR ) - abort_url = AnyHttpUrl( - url=f"http://{ip_addr}:{port}{request_.app.router['cancel_and_delete_task'].url_for(task_id=task_id)}", - scheme="http", + abort_url = AnyHttpUrlLegacyAdapter.validate_python( + f"http://{ip_addr}:{port}{request_.app.router['cancel_and_delete_task'].url_for(task_id=task_id)}" # NOSONAR ) task_get = TaskGet( task_id=task_id, diff --git a/packages/service-library/src/servicelib/aiohttp/long_running_tasks/client.py b/packages/service-library/src/servicelib/aiohttp/long_running_tasks/client.py index c99cb1ce671..04071d5d07c 100644 --- a/packages/service-library/src/servicelib/aiohttp/long_running_tasks/client.py +++ b/packages/service-library/src/servicelib/aiohttp/long_running_tasks/client.py @@ -35,7 +35,7 @@ async def _start(session: ClientSession, url: URL, json: RequestBody | None) -> data, error = unwrap_envelope(await response.json()) assert not error # nosec assert data is not None # nosec - return TaskGet.parse_obj(data) + return TaskGet.model_validate(data) @retry(**_DEFAULT_AIOHTTP_RETRY_POLICY) @@ -57,7 +57,7 @@ async def _wait_for_completion( data, error = unwrap_envelope(await response.json()) assert not error # nosec assert data is not None # nosec - task_status = TaskStatus.parse_obj(data) + task_status = TaskStatus.model_validate(data) yield task_status.task_progress if not task_status.done: await asyncio.sleep( diff --git a/packages/service-library/src/servicelib/aiohttp/requests_validation.py b/packages/service-library/src/servicelib/aiohttp/requests_validation.py index 2fd5d0e41f0..b70f0e821e4 100644 --- a/packages/service-library/src/servicelib/aiohttp/requests_validation.py +++ b/packages/service-library/src/servicelib/aiohttp/requests_validation.py @@ -14,7 +14,7 @@ from aiohttp import web from models_library.utils.json_serialization import json_dumps -from pydantic import BaseModel, Extra, ValidationError, parse_obj_as +from pydantic import BaseModel, ConfigDict, TypeAdapter, ValidationError from ..mimetype_constants import MIMETYPE_APPLICATION_JSON from . 
import status @@ -31,8 +31,9 @@ class RequestParams(BaseModel): class StrictRequestParams(BaseModel): """Use a base class for context, path and query parameters""" - class Config: - extra = Extra.forbid # strict + model_config = ConfigDict( + extra="forbid", + ) @contextmanager @@ -139,7 +140,7 @@ def parse_request_path_parameters_as( use_error_v1=use_enveloped_error_v1, ): data = dict(request.match_info) - return parameters_schema_cls.parse_obj(data) + return parameters_schema_cls.model_validate(data) def parse_request_query_parameters_as( @@ -168,8 +169,8 @@ def parse_request_query_parameters_as( ): data = dict(request.query) if hasattr(parameters_schema_cls, "parse_obj"): - return parameters_schema_cls.parse_obj(data) - model: ModelClass = parse_obj_as(parameters_schema_cls, data) + return parameters_schema_cls.model_validate(data) + model: ModelClass = TypeAdapter(parameters_schema_cls).validate_python(data) return model @@ -185,7 +186,7 @@ def parse_request_headers_as( use_error_v1=use_enveloped_error_v1, ): data = dict(request.headers) - return parameters_schema_cls.parse_obj(data) + return parameters_schema_cls.model_validate(data) async def parse_request_body_as( @@ -224,7 +225,7 @@ async def parse_request_body_as( # NOTE: model_schema can be 'list[T]' or 'dict[T]' which raise TypeError # with issubclass(model_schema, BaseModel) assert issubclass(model_schema_cls, BaseModel) # nosec - return model_schema_cls.parse_obj(body) # type: ignore [return-value] + return model_schema_cls.model_validate(body) # type: ignore [return-value] # used for model_schema like 'list[T]' or 'dict[T]' - return parse_obj_as(model_schema_cls, body) + return TypeAdapter(model_schema_cls).validate_python(body) # type: ignore[no-any-return] diff --git a/packages/service-library/src/servicelib/background_task.py b/packages/service-library/src/servicelib/background_task.py index e7a4c665c49..b1eba9bc54b 100644 --- a/packages/service-library/src/servicelib/background_task.py +++ b/packages/service-library/src/servicelib/background_task.py @@ -5,7 +5,7 @@ from collections.abc import AsyncIterator, Awaitable, Callable from typing import Final -from pydantic.errors import PydanticErrorMixin +from common_library.errors_classes import OsparcErrorMixin from tenacity import TryAgain from tenacity.asyncio import AsyncRetrying from tenacity.stop import stop_after_attempt @@ -21,7 +21,7 @@ _MAX_TASK_CANCELLATION_ATTEMPTS: Final[int] = 3 -class PeriodicTaskCancellationError(PydanticErrorMixin, Exception): +class PeriodicTaskCancellationError(OsparcErrorMixin, Exception): msg_template: str = "Could not cancel task '{task_name}'" diff --git a/packages/service-library/src/servicelib/deferred_tasks/_redis_task_tracker.py b/packages/service-library/src/servicelib/deferred_tasks/_redis_task_tracker.py index 69762108e71..382cb6c9f04 100644 --- a/packages/service-library/src/servicelib/deferred_tasks/_redis_task_tracker.py +++ b/packages/service-library/src/servicelib/deferred_tasks/_redis_task_tracker.py @@ -33,13 +33,19 @@ async def get_new_unique_identifier(self) -> TaskUID: async def _get_raw(self, redis_key: str) -> TaskScheduleModel | None: found_data = await self.redis_client_sdk.redis.get(redis_key) - return None if found_data is None else TaskScheduleModel.parse_raw(found_data) + return ( + None + if found_data is None + else TaskScheduleModel.model_validate_json(found_data) + ) async def get(self, task_uid: TaskUID) -> TaskScheduleModel | None: return await self._get_raw(_get_key(task_uid)) async def save(self, 
task_uid: TaskUID, task_schedule: TaskScheduleModel) -> None: - await self.redis_client_sdk.redis.set(_get_key(task_uid), task_schedule.json()) + await self.redis_client_sdk.redis.set( + _get_key(task_uid), task_schedule.model_dump_json() + ) async def remove(self, task_uid: TaskUID) -> None: await self.redis_client_sdk.redis.delete(_get_key(task_uid)) diff --git a/packages/service-library/src/servicelib/docker_utils.py b/packages/service-library/src/servicelib/docker_utils.py index 3b3159d5916..df976c623af 100644 --- a/packages/service-library/src/servicelib/docker_utils.py +++ b/packages/service-library/src/servicelib/docker_utils.py @@ -11,7 +11,7 @@ from models_library.docker import DockerGenericTag from models_library.generated_models.docker_rest_api import ProgressDetail from models_library.utils.change_case import snake_to_camel -from pydantic import BaseModel, ByteSize, ValidationError, parse_obj_as +from pydantic import BaseModel, ByteSize, ConfigDict, TypeAdapter, ValidationError from settings_library.docker_registry import RegistrySettings from yarl import URL @@ -39,11 +39,11 @@ class DockerLayerSizeV2(BaseModel): media_type: str size: ByteSize digest: str - - class Config: - frozen = True - alias_generator = snake_to_camel - allow_population_by_field_name = True + model_config = ConfigDict( + frozen=True, + alias_generator=snake_to_camel, + populate_by_name=True, + ) class DockerImageManifestsV2(BaseModel): @@ -51,39 +51,41 @@ class DockerImageManifestsV2(BaseModel): media_type: str config: DockerLayerSizeV2 layers: list[DockerLayerSizeV2] - - class Config: - keep_untouched = (cached_property,) - frozen = True - alias_generator = snake_to_camel - allow_population_by_field_name = True + model_config = ConfigDict( + ignored_types=(cached_property,), + frozen=True, + alias_generator=snake_to_camel, + populate_by_name=True, + ) @cached_property def layers_total_size(self) -> ByteSize: - return parse_obj_as(ByteSize, sum(layer.size for layer in self.layers)) + return TypeAdapter(ByteSize).validate_python( + sum(layer.size for layer in self.layers) + ) class DockerImageMultiArchManifestsV2(BaseModel): schema_version: Literal[2] media_type: Literal["application/vnd.oci.image.index.v1+json"] manifests: list[dict[str, Any]] - - class Config: - frozen = True - alias_generator = snake_to_camel - allow_population_by_field_name = True + model_config = ConfigDict( + frozen=True, + alias_generator=snake_to_camel, + populate_by_name=True, + ) class _DockerPullImage(BaseModel): status: str - id: str | None - progress_detail: ProgressDetail | None - progress: str | None - - class Config: - frozen = True - alias_generator = snake_to_camel - allow_population_by_field_name = True + id: str | None = None + progress_detail: ProgressDetail | None = None + progress: str | None = None + model_config = ConfigDict( + frozen=True, + alias_generator=snake_to_camel, + populate_by_name=True, + ) DOCKER_HUB_HOST: Final[str] = "registry-1.docker.io" @@ -241,7 +243,9 @@ async def pull_image( image, stream=True, auth=registry_auth ): try: - parsed_progress = parse_obj_as(_DockerPullImage, pull_progress) + parsed_progress = TypeAdapter(_DockerPullImage).validate_python( + pull_progress + ) except ValidationError: _logger.exception( "Unexpected error while validating '%s'. 
" diff --git a/packages/service-library/src/servicelib/error_codes.py b/packages/service-library/src/servicelib/error_codes.py index 2803e3627ab..06cd14ac8bc 100644 --- a/packages/service-library/src/servicelib/error_codes.py +++ b/packages/service-library/src/servicelib/error_codes.py @@ -9,10 +9,9 @@ import re -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Annotated -from pydantic.tools import parse_obj_as -from pydantic.types import constr +from pydantic import StringConstraints, TypeAdapter _LABEL = "OEC:{}" _PATTERN = r"OEC:\d+" @@ -20,11 +19,13 @@ if TYPE_CHECKING: ErrorCodeStr = str else: - ErrorCodeStr = constr(strip_whitespace=True, regex=_PATTERN) + ErrorCodeStr = Annotated[ + str, StringConstraints(strip_whitespace=True, pattern=_PATTERN) + ] def create_error_code(exception: BaseException) -> ErrorCodeStr: - return parse_obj_as(ErrorCodeStr, _LABEL.format(id(exception))) + return TypeAdapter(ErrorCodeStr).validate_python(_LABEL.format(id(exception))) def parse_error_code(obj) -> set[ErrorCodeStr]: diff --git a/packages/service-library/src/servicelib/fastapi/docker_utils.py b/packages/service-library/src/servicelib/fastapi/docker_utils.py index 1c71c190a47..c7db4c1cf2d 100644 --- a/packages/service-library/src/servicelib/fastapi/docker_utils.py +++ b/packages/service-library/src/servicelib/fastapi/docker_utils.py @@ -5,7 +5,7 @@ import httpx from models_library.basic_types import IDStr from models_library.docker import DockerGenericTag -from pydantic import ByteSize, ValidationError, parse_obj_as +from pydantic import ByteSize, TypeAdapter, ValidationError from settings_library.docker_registry import RegistrySettings from yarl import URL @@ -72,9 +72,9 @@ async def retrieve_image_layer_information( # if the image has multiple architectures json_response = response.json() try: - multi_arch_manifests = parse_obj_as( - DockerImageMultiArchManifestsV2, json_response - ) + multi_arch_manifests = TypeAdapter( + DockerImageMultiArchManifestsV2 + ).validate_python(json_response) # find the correct platform digest = "" for manifest in multi_arch_manifests.manifests: @@ -93,14 +93,20 @@ async def retrieve_image_layer_information( response.raise_for_status() assert response.status_code == status.HTTP_200_OK # nosec json_response = response.json() - return parse_obj_as(DockerImageManifestsV2, json_response) + return TypeAdapter(DockerImageManifestsV2).validate_python( + json_response + ) except ValidationError: - return parse_obj_as(DockerImageManifestsV2, json_response) + return TypeAdapter(DockerImageManifestsV2).validate_python( + json_response + ) return None -_DEFAULT_MIN_IMAGE_SIZE: Final[ByteSize] = parse_obj_as(ByteSize, "200MiB") +_DEFAULT_MIN_IMAGE_SIZE: Final[ByteSize] = TypeAdapter(ByteSize).validate_python( + "200MiB" +) async def pull_images( diff --git a/packages/service-library/src/servicelib/fastapi/errors.py b/packages/service-library/src/servicelib/fastapi/errors.py index 9eebef84637..139ed573fbe 100644 --- a/packages/service-library/src/servicelib/fastapi/errors.py +++ b/packages/service-library/src/servicelib/fastapi/errors.py @@ -1,7 +1,7 @@ -from pydantic.errors import PydanticErrorMixin +from common_library.errors_classes import OsparcErrorMixin -class ApplicationRuntimeError(PydanticErrorMixin, RuntimeError): +class ApplicationRuntimeError(OsparcErrorMixin, RuntimeError): pass diff --git a/packages/service-library/src/servicelib/fastapi/http_client_thin.py b/packages/service-library/src/servicelib/fastapi/http_client_thin.py index 
e00e0d636a2..c113321a488 100644 --- a/packages/service-library/src/servicelib/fastapi/http_client_thin.py +++ b/packages/service-library/src/servicelib/fastapi/http_client_thin.py @@ -7,7 +7,7 @@ from httpx import AsyncClient, ConnectError, HTTPError, PoolTimeout, Response from httpx._types import TimeoutTypes, URLTypes -from pydantic.errors import PydanticErrorMixin +from common_library.errors_classes import OsparcErrorMixin from tenacity import RetryCallState from tenacity.asyncio import AsyncRetrying from tenacity.before_sleep import before_sleep_log @@ -30,7 +30,7 @@ """ -class BaseClientError(PydanticErrorMixin, Exception): +class BaseClientError(OsparcErrorMixin, Exception): """Used as based for all the raised errors""" msg_template: str = "{message}" diff --git a/packages/service-library/src/servicelib/fastapi/long_running_tasks/_client.py b/packages/service-library/src/servicelib/fastapi/long_running_tasks/_client.py index a2dda66735a..36458031ff8 100644 --- a/packages/service-library/src/servicelib/fastapi/long_running_tasks/_client.py +++ b/packages/service-library/src/servicelib/fastapi/long_running_tasks/_client.py @@ -4,9 +4,10 @@ import warnings from typing import Any, Awaitable, Callable, Final +from common_library.pydantic_type_adapters import AnyHttpUrlLegacyAdapter from fastapi import FastAPI, status from httpx import AsyncClient, HTTPError -from pydantic import AnyHttpUrl, PositiveFloat, parse_obj_as +from pydantic import PositiveFloat from tenacity import RetryCallState from tenacity.asyncio import AsyncRetrying from tenacity.retry import retry_if_exception_type @@ -23,6 +24,7 @@ DEFAULT_HTTP_REQUESTS_TIMEOUT: Final[PositiveFloat] = 15 + logger = logging.getLogger(__name__) @@ -113,7 +115,7 @@ class Client: status, result and/or cancel of a long running task. 
""" - def __init__(self, app: FastAPI, async_client: AsyncClient, base_url: AnyHttpUrl): + def __init__(self, app: FastAPI, async_client: AsyncClient, base_url: str): """ `app`: used byt the `Client` to recover the `ClientConfiguration` `async_client`: an AsyncClient instance used by `Client` @@ -128,12 +130,9 @@ def _client_configuration(self) -> ClientConfiguration: output: ClientConfiguration = self.app.state.long_running_client_configuration return output - def _get_url(self, path: str) -> AnyHttpUrl: - output: AnyHttpUrl = parse_obj_as( - AnyHttpUrl, - f"{self._base_url}{self._client_configuration.router_prefix}{path}", - ) - return output + def _get_url(self, path: str) -> str: + url = f"{self._base_url}{self._client_configuration.router_prefix}{path}" + return f"{AnyHttpUrlLegacyAdapter.validate_python(url)}" @retry_on_http_errors async def get_task_status( @@ -152,7 +151,7 @@ async def get_task_status( body=result.text, ) - return TaskStatus.parse_obj(result.json()) + return TaskStatus.model_validate(result.json()) @retry_on_http_errors async def get_task_result( @@ -171,7 +170,7 @@ async def get_task_result( body=result.text, ) - task_result = TaskResult.parse_obj(result.json()) + task_result = TaskResult.model_validate(result.json()) if task_result.error is not None: raise TaskClientResultError(message=task_result.error) return task_result.result diff --git a/packages/service-library/src/servicelib/fastapi/long_running_tasks/_context_manager.py b/packages/service-library/src/servicelib/fastapi/long_running_tasks/_context_manager.py index 7cb61f29140..2c001525173 100644 --- a/packages/service-library/src/servicelib/fastapi/long_running_tasks/_context_manager.py +++ b/packages/service-library/src/servicelib/fastapi/long_running_tasks/_context_manager.py @@ -96,7 +96,7 @@ async def periodic_task_result( async def _status_update() -> TaskStatus: task_status: TaskStatus = await client.get_task_status(task_id) - logger.debug("Task status %s", task_status.json()) + logger.debug("Task status %s", task_status.model_dump_json()) await progress_manager.update( task_id=task_id, message=task_status.task_progress.message, diff --git a/packages/service-library/src/servicelib/fastapi/long_running_tasks/_server.py b/packages/service-library/src/servicelib/fastapi/long_running_tasks/_server.py index c5d7429f01a..e8306b6d187 100644 --- a/packages/service-library/src/servicelib/fastapi/long_running_tasks/_server.py +++ b/packages/service-library/src/servicelib/fastapi/long_running_tasks/_server.py @@ -50,4 +50,4 @@ async def on_shutdown() -> None: # add error handlers # NOTE: Exception handler can not be added during the on_startup script, otherwise not working correctly - app.add_exception_handler(BaseLongRunningError, base_long_running_error_handler) + app.add_exception_handler(BaseLongRunningError, base_long_running_error_handler) # type: ignore[arg-type] diff --git a/packages/service-library/src/servicelib/file_utils.py b/packages/service-library/src/servicelib/file_utils.py index c90468cba2a..a52854c26e7 100644 --- a/packages/service-library/src/servicelib/file_utils.py +++ b/packages/service-library/src/servicelib/file_utils.py @@ -10,9 +10,9 @@ # https://docs.python.org/3/library/os.html#os.remove from aiofiles.os import remove from aiofiles.os import wrap as sync_to_async -from pydantic import ByteSize, parse_obj_as +from pydantic import ByteSize, TypeAdapter -CHUNK_4KB: Final[ByteSize] = parse_obj_as(ByteSize, "4kb") # 4K blocks +CHUNK_4KB: Final[ByteSize] = 
TypeAdapter(ByteSize).validate_python("4kb") # 4K blocks class AsyncStream(Protocol): diff --git a/packages/service-library/src/servicelib/long_running_tasks/_errors.py b/packages/service-library/src/servicelib/long_running_tasks/_errors.py index 73722f746ac..44dc03157f2 100644 --- a/packages/service-library/src/servicelib/long_running_tasks/_errors.py +++ b/packages/service-library/src/servicelib/long_running_tasks/_errors.py @@ -1,10 +1,10 @@ -from pydantic.errors import PydanticErrorMixin +from common_library.errors_classes import OsparcErrorMixin -class BaseLongRunningError(PydanticErrorMixin, Exception): +class BaseLongRunningError(OsparcErrorMixin, Exception): """base exception for this module""" - code: str = "long_running_task.base_long_running_error" + code: str = "long_running_task.base_long_running_error" # type: ignore[assignment] class TaskAlreadyRunningError(BaseLongRunningError): diff --git a/packages/service-library/src/servicelib/long_running_tasks/_models.py b/packages/service-library/src/servicelib/long_running_tasks/_models.py index b211ca29fdc..fc240160b81 100644 --- a/packages/service-library/src/servicelib/long_running_tasks/_models.py +++ b/packages/service-library/src/servicelib/long_running_tasks/_models.py @@ -15,7 +15,7 @@ TaskResult, TaskStatus, ) -from pydantic import BaseModel, Field, PositiveFloat +from pydantic import BaseModel, ConfigDict, Field, PositiveFloat TaskName: TypeAlias = str @@ -46,9 +46,9 @@ class TrackedTask(BaseModel): "polled by the client who created it" ), ) - - class Config: - arbitrary_types_allowed = True + model_config = ConfigDict( + arbitrary_types_allowed=True, + ) class ClientConfiguration(BaseModel): diff --git a/packages/service-library/src/servicelib/long_running_tasks/_task.py b/packages/service-library/src/servicelib/long_running_tasks/_task.py index 88960cb6327..641e78a96a8 100644 --- a/packages/service-library/src/servicelib/long_running_tasks/_task.py +++ b/packages/service-library/src/servicelib/long_running_tasks/_task.py @@ -123,7 +123,9 @@ async def _stale_tasks_monitor_worker(self) -> None: logger.warning( "Removing stale task '%s' with status '%s'", task_id, - self.get_task_status(task_id, with_task_context=None).json(), + self.get_task_status( + task_id, with_task_context=None + ).model_dump_json(), ) await self.remove_task( task_id, with_task_context=None, reraise_errors=False @@ -210,7 +212,7 @@ def get_task_status( task = tracked_task.task done = task.done() - return TaskStatus.parse_obj( + return TaskStatus.model_validate( { "task_progress": tracked_task.task_progress, "done": done, diff --git a/packages/service-library/src/servicelib/progress_bar.py b/packages/service-library/src/servicelib/progress_bar.py index 782f89ba550..bf70c0c3e88 100644 --- a/packages/service-library/src/servicelib/progress_bar.py +++ b/packages/service-library/src/servicelib/progress_bar.py @@ -10,7 +10,7 @@ ProgressStructuredMessage, ProgressUnit, ) -from pydantic import parse_obj_as +from pydantic import TypeAdapter from .logging_utils import log_catch @@ -95,7 +95,7 @@ async def main_fct(): def __post_init__(self) -> None: if self.progress_unit is not None: - parse_obj_as(ProgressUnit, self.progress_unit) # type: ignore[arg-type] # mypy does not like Literal with parse_obj_as + TypeAdapter(ProgressUnit).validate_python(self.progress_unit) self._continuous_value_lock = asyncio.Lock() self.num_steps = max(1, self.num_steps) if self.step_weights: diff --git a/packages/service-library/src/servicelib/rabbitmq/_errors.py 
b/packages/service-library/src/servicelib/rabbitmq/_errors.py index 0e3efbf3a11..c105c2b8ff3 100644 --- a/packages/service-library/src/servicelib/rabbitmq/_errors.py +++ b/packages/service-library/src/servicelib/rabbitmq/_errors.py @@ -1,21 +1,21 @@ from typing import Final -from pydantic.errors import PydanticErrorMixin +from common_library.errors_classes import OsparcErrorMixin _ERROR_PREFIX: Final[str] = "rabbitmq_error" -class BaseRPCError(PydanticErrorMixin, RuntimeError): +class BaseRPCError(OsparcErrorMixin, RuntimeError): ... class RPCNotInitializedError(BaseRPCError): - code = f"{_ERROR_PREFIX}.not_started" + code = f"{_ERROR_PREFIX}.not_started" # type: ignore[assignment] msg_template = "Please check that the RabbitMQ RPC backend was initialized!" class RemoteMethodNotRegisteredError(BaseRPCError): - code = f"{_ERROR_PREFIX}.remote_not_registered" + code = f"{_ERROR_PREFIX}.remote_not_registered" # type: ignore[assignment] msg_template = ( "Could not find a remote method named: '{method_name}'. " "Message from remote server was returned: {incoming_message}. " diff --git a/packages/service-library/src/servicelib/rabbitmq/_models.py b/packages/service-library/src/servicelib/rabbitmq/_models.py index 565447072fa..e48e4bb13aa 100644 --- a/packages/service-library/src/servicelib/rabbitmq/_models.py +++ b/packages/service-library/src/servicelib/rabbitmq/_models.py @@ -2,12 +2,13 @@ from collections.abc import Awaitable, Callable from typing import Any, Protocol +from models_library.basic_types import ConstrainedStr from models_library.rabbitmq_basic_types import ( REGEX_RABBIT_QUEUE_ALLOWED_SYMBOLS, RPCMethodName, RPCNamespace, ) -from pydantic import ConstrainedStr, parse_obj_as +from pydantic import TypeAdapter MessageHandler = Callable[[Any], Awaitable[bool]] @@ -23,11 +24,11 @@ def routing_key(self) -> str | None: class RPCNamespacedMethodName(ConstrainedStr): min_length: int = 1 max_length: int = 255 - regex: re.Pattern[str] | None = re.compile(REGEX_RABBIT_QUEUE_ALLOWED_SYMBOLS) + pattern: str = REGEX_RABBIT_QUEUE_ALLOWED_SYMBOLS @classmethod def from_namespace_and_method( cls, namespace: RPCNamespace, method_name: RPCMethodName ) -> "RPCNamespacedMethodName": namespaced_method_name = f"{namespace}.{method_name}" - return parse_obj_as(cls, namespaced_method_name) + return TypeAdapter(cls).validate_python(namespaced_method_name) diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/catalog/errors.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/catalog/errors.py index 5837beddcae..d278bb350ba 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/catalog/errors.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/catalog/errors.py @@ -1,11 +1,8 @@ -from typing import Any - from common_library.errors_classes import OsparcErrorMixin class CatalogApiBaseError(OsparcErrorMixin, Exception): - def __init__(self, **ctx: Any) -> None: - super().__init__(**ctx) + pass class CatalogItemNotFoundError(CatalogApiBaseError): diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/catalog/services.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/catalog/services.py index cc67413aefe..83efa8f1d0e 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/catalog/services.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/catalog/services.py @@ -16,7 +16,7 @@ ) from models_library.services_types import ServiceKey, ServiceVersion from 
models_library.users import UserID -from pydantic import NonNegativeInt, parse_obj_as, validate_arguments +from pydantic import NonNegativeInt, TypeAdapter, validate_call from servicelib.logging_utils import log_decorator from servicelib.rabbitmq._constants import RPC_REQUEST_DEFAULT_TIMEOUT_S @@ -40,7 +40,7 @@ async def list_services_paginated( # pylint: disable=too-many-arguments CatalogForbiddenError: no access-rights to list services """ - @validate_arguments() + @validate_call() async def _call( product_name: ProductName, user_id: UserID, @@ -49,7 +49,7 @@ async def _call( ): return await rpc_client.request( CATALOG_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "list_services_paginated"), + TypeAdapter(RPCMethodName).validate_python("list_services_paginated"), product_name=product_name, user_id=user_id, limit=limit, @@ -60,7 +60,9 @@ async def _call( result = await _call( product_name=product_name, user_id=user_id, limit=limit, offset=offset ) - assert parse_obj_as(PageRpc[ServiceGetV2], result) is not None # nosec + assert ( + TypeAdapter(PageRpc[ServiceGetV2]).validate_python(result) is not None + ) # nosec return cast(PageRpc[ServiceGetV2], result) @@ -80,7 +82,7 @@ async def get_service( CatalogForbiddenError: not access rights to read this service """ - @validate_arguments() + @validate_call() async def _call( product_name: ProductName, user_id: UserID, @@ -89,7 +91,7 @@ async def _call( ) -> Any: return await rpc_client.request( CATALOG_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "get_service"), + TypeAdapter(RPCMethodName).validate_python("get_service"), product_name=product_name, user_id=user_id, service_key=service_key, @@ -103,7 +105,7 @@ async def _call( service_key=service_key, service_version=service_version, ) - assert parse_obj_as(ServiceGetV2, result) is not None # nosec + assert TypeAdapter(ServiceGetV2).validate_python(result) is not None # nosec return cast(ServiceGetV2, result) @@ -125,7 +127,7 @@ async def update_service( CatalogForbiddenError: not access rights to read this service """ - @validate_arguments() + @validate_call() async def _call( product_name: ProductName, user_id: UserID, @@ -135,7 +137,7 @@ async def _call( ): return await rpc_client.request( CATALOG_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "update_service"), + TypeAdapter(RPCMethodName).validate_python("update_service"), product_name=product_name, user_id=user_id, service_key=service_key, @@ -150,7 +152,7 @@ async def _call( service_version=service_version, update=update, ) - assert parse_obj_as(ServiceGetV2, result) is not None # nosec + assert TypeAdapter(ServiceGetV2).validate_python(result) is not None # nosec return cast(ServiceGetV2, result) @@ -170,7 +172,7 @@ async def check_for_service( CatalogForbiddenError: not access rights to read this service """ - @validate_arguments() + @validate_call() async def _call( product_name: ProductName, user_id: UserID, @@ -179,7 +181,7 @@ async def _call( ): return await rpc_client.request( CATALOG_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "check_for_service"), + TypeAdapter(RPCMethodName).validate_python("check_for_service"), product_name=product_name, user_id=user_id, service_key=service_key, diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_scheduler/errors.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_scheduler/errors.py index 5e104db333c..6d7bf2a722c 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_scheduler/errors.py +++ 
b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_scheduler/errors.py @@ -1,7 +1,7 @@ -from pydantic.errors import PydanticErrorMixin +from common_library.errors_classes import OsparcErrorMixin -class BaseDynamicSchedulerRPCError(PydanticErrorMixin, Exception): +class BaseDynamicSchedulerRPCError(OsparcErrorMixin, Exception): ... diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_scheduler/services.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_scheduler/services.py index 9da2dad425e..3dcc9ed502f 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_scheduler/services.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_scheduler/services.py @@ -10,7 +10,7 @@ from models_library.api_schemas_webserver.projects_nodes import NodeGet, NodeGetIdle from models_library.projects_nodes_io import NodeID from models_library.rabbitmq_basic_types import RPCMethodName -from pydantic import NonNegativeInt, parse_obj_as +from pydantic import NonNegativeInt, TypeAdapter from servicelib.logging_utils import log_decorator from servicelib.rabbitmq import RabbitMQRPCClient @@ -26,6 +26,8 @@ DEFAULT_LEGACY_WB_TO_DV2_HTTP_REQUESTS_TIMEOUT_S * 2 ) +_RPC_METHOD_NAME_ADAPTER: TypeAdapter[RPCMethodName] = TypeAdapter(RPCMethodName) + @log_decorator(_logger, level=logging.DEBUG) async def get_service_status( @@ -33,7 +35,7 @@ async def get_service_status( ) -> NodeGetIdle | DynamicServiceGet | NodeGet: result = await rabbitmq_rpc_client.request( DYNAMIC_SCHEDULER_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "get_service_status"), + _RPC_METHOD_NAME_ADAPTER.validate_python("get_service_status"), node_id=node_id, timeout_s=_RPC_DEFAULT_TIMEOUT_S, ) @@ -49,7 +51,7 @@ async def run_dynamic_service( ) -> DynamicServiceGet | NodeGet: result = await rabbitmq_rpc_client.request( DYNAMIC_SCHEDULER_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "run_dynamic_service"), + _RPC_METHOD_NAME_ADAPTER.validate_python("run_dynamic_service"), dynamic_service_start=dynamic_service_start, timeout_s=_RPC_DEFAULT_TIMEOUT_S, ) @@ -66,7 +68,7 @@ async def stop_dynamic_service( ) -> None: result = await rabbitmq_rpc_client.request( DYNAMIC_SCHEDULER_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "stop_dynamic_service"), + _RPC_METHOD_NAME_ADAPTER.validate_python("stop_dynamic_service"), dynamic_service_stop=dynamic_service_stop, timeout_s=timeout_s, ) diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/efs_guardian/efs_manager.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/efs_guardian/efs_manager.py index 592959eb08c..ec05906b1ef 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/efs_guardian/efs_manager.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/efs_guardian/efs_manager.py @@ -6,7 +6,7 @@ from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID from models_library.rabbitmq_basic_types import RPCMethodName -from pydantic import NonNegativeInt, parse_obj_as +from pydantic import NonNegativeInt, TypeAdapter from ....logging_utils import log_decorator from ....rabbitmq import RabbitMQRPCClient @@ -27,7 +27,7 @@ async def create_project_specific_data_dir( ) -> Path: output: Path = await rabbitmq_rpc_client.request( EFS_GUARDIAN_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "create_project_specific_data_dir"), + 
TypeAdapter(RPCMethodName).validate_python("create_project_specific_data_dir"), project_id=project_id, node_id=node_id, storage_directory_name=storage_directory_name, diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/errors.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/errors.py index 44549841802..f9c1a24f406 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/errors.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/errors.py @@ -1,7 +1,7 @@ -from pydantic.errors import PydanticErrorMixin +from common_library.errors_classes import OsparcErrorMixin -class ResourceUsageTrackerRuntimeError(PydanticErrorMixin, RuntimeError): +class ResourceUsageTrackerRuntimeError(OsparcErrorMixin, RuntimeError): msg_template: str = "Resource-usage-tracker unexpected error" diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/pricing_plans.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/pricing_plans.py index a7dc4b5d404..218cd139fb4 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/pricing_plans.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/pricing_plans.py @@ -16,7 +16,7 @@ PricingPlanUpdate, ) from models_library.services import ServiceKey, ServiceVersion -from pydantic import NonNegativeInt, parse_obj_as +from pydantic import NonNegativeInt, TypeAdapter from ....logging_utils import log_decorator from ....rabbitmq import RabbitMQRPCClient @@ -26,6 +26,8 @@ _DEFAULT_TIMEOUT_S: Final[NonNegativeInt] = 20 +_RPC_METHOD_NAME_ADAPTER: TypeAdapter[RPCMethodName] = TypeAdapter(RPCMethodName) + @log_decorator(_logger, level=logging.DEBUG) async def get_pricing_plan( @@ -36,7 +38,7 @@ async def get_pricing_plan( ) -> PricingPlanGet: result: PricingPlanGet = await rabbitmq_rpc_client.request( RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "get_pricing_plan"), + _RPC_METHOD_NAME_ADAPTER.validate_python("get_pricing_plan"), product_name=product_name, pricing_plan_id=pricing_plan_id, timeout_s=_DEFAULT_TIMEOUT_S, @@ -53,7 +55,7 @@ async def list_pricing_plans( ) -> list[PricingPlanGet]: result: PricingPlanGet = await rabbitmq_rpc_client.request( RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "list_pricing_plans"), + _RPC_METHOD_NAME_ADAPTER.validate_python("list_pricing_plans"), product_name=product_name, timeout_s=_DEFAULT_TIMEOUT_S, ) @@ -69,7 +71,7 @@ async def create_pricing_plan( ) -> PricingPlanGet: result: PricingPlanGet = await rabbitmq_rpc_client.request( RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "create_pricing_plan"), + _RPC_METHOD_NAME_ADAPTER.validate_python("create_pricing_plan"), data=data, timeout_s=_DEFAULT_TIMEOUT_S, ) @@ -86,7 +88,7 @@ async def update_pricing_plan( ) -> PricingPlanGet: result: PricingPlanGet = await rabbitmq_rpc_client.request( RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "update_pricing_plan"), + _RPC_METHOD_NAME_ADAPTER.validate_python("update_pricing_plan"), product_name=product_name, data=data, timeout_s=_DEFAULT_TIMEOUT_S, @@ -104,8 +106,8 @@ async def list_connected_services_to_pricing_plan_by_pricing_plan( ) -> list[PricingPlanToServiceGet]: result: PricingPlanGet = await rabbitmq_rpc_client.request( 
RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, - parse_obj_as( - RPCMethodName, "list_connected_services_to_pricing_plan_by_pricing_plan" + _RPC_METHOD_NAME_ADAPTER.validate_python( + "list_connected_services_to_pricing_plan_by_pricing_plan" ), product_name=product_name, pricing_plan_id=pricing_plan_id, @@ -126,7 +128,7 @@ async def connect_service_to_pricing_plan( ) -> PricingPlanToServiceGet: result: PricingPlanGet = await rabbitmq_rpc_client.request( RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "connect_service_to_pricing_plan"), + _RPC_METHOD_NAME_ADAPTER.validate_python("connect_service_to_pricing_plan"), product_name=product_name, pricing_plan_id=pricing_plan_id, service_key=service_key, diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/pricing_units.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/pricing_units.py index cec80e7186a..afa5611a92d 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/pricing_units.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/pricing_units.py @@ -15,7 +15,7 @@ PricingUnitWithCostCreate, PricingUnitWithCostUpdate, ) -from pydantic import NonNegativeInt, parse_obj_as +from pydantic import NonNegativeInt, TypeAdapter from ....logging_utils import log_decorator from ....rabbitmq import RabbitMQRPCClient @@ -25,6 +25,8 @@ _DEFAULT_TIMEOUT_S: Final[NonNegativeInt] = 20 +_RPC_METHOD_NAME_ADAPTER: TypeAdapter[RPCMethodName] = TypeAdapter(RPCMethodName) + @log_decorator(_logger, level=logging.DEBUG) async def get_pricing_unit( @@ -36,7 +38,7 @@ async def get_pricing_unit( ) -> PricingUnitGet: result: PricingUnitGet = await rabbitmq_rpc_client.request( RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "get_pricing_unit"), + _RPC_METHOD_NAME_ADAPTER.validate_python("get_pricing_unit"), product_name=product_name, pricing_plan_id=pricing_plan_id, pricing_unit_id=pricing_unit_id, @@ -55,7 +57,7 @@ async def create_pricing_unit( ) -> PricingUnitGet: result: PricingUnitGet = await rabbitmq_rpc_client.request( RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "create_pricing_unit"), + _RPC_METHOD_NAME_ADAPTER.validate_python("create_pricing_unit"), product_name=product_name, data=data, timeout_s=_DEFAULT_TIMEOUT_S, @@ -73,7 +75,7 @@ async def update_pricing_unit( ) -> PricingUnitGet: result: PricingUnitGet = await rabbitmq_rpc_client.request( RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "update_pricing_unit"), + _RPC_METHOD_NAME_ADAPTER.validate_python("update_pricing_unit"), product_name=product_name, data=data, timeout_s=_DEFAULT_TIMEOUT_S, diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/service_runs.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/service_runs.py index e826363897a..ad7b2fd908b 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/service_runs.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/service_runs.py @@ -18,7 +18,7 @@ from models_library.rest_ordering import OrderBy from models_library.users import UserID from models_library.wallets import WalletID -from pydantic import AnyUrl, NonNegativeInt, parse_obj_as +from pydantic import AnyUrl, NonNegativeInt, TypeAdapter from ....logging_utils import 
log_decorator from ....rabbitmq import RabbitMQRPCClient @@ -28,6 +28,8 @@ _DEFAULT_TIMEOUT_S: Final[NonNegativeInt] = 20 +_RPC_METHOD_NAME_ADAPTER: TypeAdapter[RPCMethodName] = TypeAdapter(RPCMethodName) + @log_decorator(_logger, level=logging.DEBUG) async def get_service_run_page( @@ -44,7 +46,7 @@ async def get_service_run_page( ) -> ServiceRunPage: result = await rabbitmq_rpc_client.request( RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "get_service_run_page"), + _RPC_METHOD_NAME_ADAPTER.validate_python("get_service_run_page"), user_id=user_id, product_name=product_name, limit=limit, @@ -74,7 +76,9 @@ async def get_osparc_credits_aggregated_usages_page( ) -> OsparcCreditsAggregatedUsagesPage: result = await rabbitmq_rpc_client.request( RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "get_osparc_credits_aggregated_usages_page"), + _RPC_METHOD_NAME_ADAPTER.validate_python( + "get_osparc_credits_aggregated_usages_page" + ), user_id=user_id, product_name=product_name, limit=limit, @@ -102,7 +106,7 @@ async def export_service_runs( ) -> AnyUrl: result: AnyUrl = await rabbitmq_rpc_client.request( RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "export_service_runs"), + _RPC_METHOD_NAME_ADAPTER.validate_python("export_service_runs"), user_id=user_id, product_name=product_name, wallet_id=wallet_id, diff --git a/packages/service-library/src/servicelib/redis.py b/packages/service-library/src/servicelib/redis.py index 03847ae0b04..fce89d7790e 100644 --- a/packages/service-library/src/servicelib/redis.py +++ b/packages/service-library/src/servicelib/redis.py @@ -10,8 +10,8 @@ import redis.asyncio as aioredis import redis.exceptions +from common_library.errors_classes import OsparcErrorMixin from pydantic import NonNegativeFloat, NonNegativeInt -from pydantic.errors import PydanticErrorMixin from redis.asyncio.lock import Lock from redis.asyncio.retry import Retry from redis.backoff import ExponentialBackoff @@ -36,7 +36,7 @@ _logger = logging.getLogger(__name__) -class BaseRedisError(PydanticErrorMixin, RuntimeError): +class BaseRedisError(OsparcErrorMixin, RuntimeError): ... 
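
The hunks above and below all apply the same handful of mechanical pydantic v1 to v2 rewrites; the sketches that follow summarize each pattern with plain-pydantic stand-ins. Names and values are illustrative, not code from this repository. First, the validation and serialization renames: `parse_obj_as` becomes a `TypeAdapter`, `parse_obj`/`parse_raw` become `model_validate`/`model_validate_json`, and `.json()` becomes `.model_dump_json()`.

```python
from pydantic import BaseModel, ByteSize, TypeAdapter


class LayerInfo(BaseModel):  # illustrative model, not from the repo
    digest: str
    size: ByteSize


# v1: parse_obj_as(ByteSize, "4kb")
chunk_4kb = TypeAdapter(ByteSize).validate_python("4kb")

# v1: LayerInfo.parse_obj({...})
layer = LayerInfo.model_validate({"digest": "sha256:deadbeef", "size": chunk_4kb})

# v1: LayerInfo.parse_raw(layer.json())
restored = LayerInfo.model_validate_json(layer.model_dump_json())
assert restored == layer

# v1: parse_obj_as(list[LayerInfo], [...]); TypeAdapter covers non-model types too
layers = TypeAdapter(list[LayerInfo]).validate_python([layer.model_dump()])
assert layers[0].size == chunk_4kb
```

Building a `TypeAdapter` is not free, which is presumably why the rpc_interfaces modules hoist a module-level `_RPC_METHOD_NAME_ADAPTER` instead of constructing a fresh adapter on every request.
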
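Second, model configuration: the nested `class Config` is replaced by `model_config = ConfigDict(...)`, with `allow_population_by_field_name` renamed to `populate_by_name` and `keep_untouched` to `ignored_types`. Optional fields also need an explicit default now, hence the `= None` added to `_DockerPullImage`. A sketch, using a local stand-in for the repo's `snake_to_camel` helper:

```python
from functools import cached_property

from pydantic import BaseModel, ConfigDict


def snake_to_camel(name: str) -> str:
    # stand-in for models_library.utils.change_case.snake_to_camel
    first, *rest = name.split("_")
    return first + "".join(word.capitalize() for word in rest)


class Manifest(BaseModel):  # illustrative, modeled on DockerImageManifestsV2
    model_config = ConfigDict(
        frozen=True,                       # v1: class Config: frozen = True
        alias_generator=snake_to_camel,
        populate_by_name=True,             # v1: allow_population_by_field_name
        ignored_types=(cached_property,),  # v1: keep_untouched
    )

    media_type: str
    progress: str | None = None  # v2 requires the explicit default

    @cached_property
    def label(self) -> str:  # usable on a frozen model thanks to ignored_types
        return f"{self.media_type} ({self.progress or 'n/a'})"


m1 = Manifest.model_validate({"mediaType": "application/vnd.oci.image.manifest.v1+json"})
m2 = Manifest(media_type=m1.media_type)  # populate_by_name lets field names through
assert m1 == m2
assert m1.label.endswith("(n/a)")
```
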
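Third, URLs: v2's `AnyHttpUrl` is no longer a `str` subclass, and the v1 keyword constructor (`AnyHttpUrl(url=..., scheme=...)`) is gone. That is why the long-running-tasks client now types `base_url` as `str` and round-trips through `AnyHttpUrlLegacyAdapter` plus an f-string. Assuming the repo-local legacy adapter behaves like a plain `TypeAdapter(AnyHttpUrl)`:

```python
from pydantic import AnyHttpUrl, TypeAdapter

adapter = TypeAdapter(AnyHttpUrl)

# v1: AnyHttpUrl(url=f"http://{ip}:{port}{path}", scheme="http") returned a str subclass
status_url = adapter.validate_python("http://127.0.0.1:8000/v0/tasks/123")

assert not isinstance(status_url, str)  # v2 returns a pydantic-core Url object
as_str = f"{status_url}"                # hence the f-string round-trips in this PR
assert as_str == "http://127.0.0.1:8000/v0/tasks/123"
```

pydantic-core also normalizes URLs on validation (a bare origin gains a trailing slash, for instance), which is presumably the motivation for a dedicated "legacy" adapter rather than using `AnyHttpUrl` directly.
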
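Fourth, constrained types and decorated callables: `constr(regex=...)` becomes `Annotated[str, StringConstraints(pattern=...)]` (as in error_codes.py), `ConstrainedStr` subclasses switch from `regex` to `pattern` (as in rabbitmq/_models.py), and `validate_arguments` becomes `validate_call`. A sketch mirroring `ErrorCodeStr` and `secure_randint`, with the body of the latter elided:

```python
from typing import Annotated

from pydantic import (
    StrictInt,
    StringConstraints,
    TypeAdapter,
    ValidationError,
    validate_call,
)

# v1: ErrorCodeStr = constr(strip_whitespace=True, regex=r"OEC:\d+")
ErrorCodeStr = Annotated[str, StringConstraints(strip_whitespace=True, pattern=r"OEC:\d+")]
assert TypeAdapter(ErrorCodeStr).validate_python("  OEC:12345 ") == "OEC:12345"


# v1: @validate_arguments
@validate_call
def secure_randint(start: StrictInt, end: StrictInt) -> int:
    return start  # body elided; the real helper draws from `secrets`


secure_randint(1, 10)
try:
    secure_randint("1", 10)  # type: ignore[arg-type]
except ValidationError:
    pass  # StrictInt rejects the coercion a plain `int` annotation would accept
```
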
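Finally, the error hierarchies: `pydantic.errors.PydanticErrorMixin` does not exist in v2, so every error base here is rebased onto the repo's `common_library.errors_classes.OsparcErrorMixin`. Judging from the call sites, the mixin keeps the v1 contract of formatting `msg_template` with the keyword arguments passed at raise time; the stand-in below is hypothetical and reproduces only that contract. The `# type: ignore[assignment]` comments on the `code` overrides suggest the mixin declares `code` with a different type than a plain class attribute.

```python
# Hypothetical stand-in: only the msg_template contract visible in these hunks
# is reproduced; the real OsparcErrorMixin lives in common_library.errors_classes.
class ErrorMixinSketch:
    msg_template: str = "{message}"

    def __init__(self, **ctx: object) -> None:
        super().__init__()
        self._ctx = ctx

    def __str__(self) -> str:
        return self.msg_template.format(**self._ctx)


class PeriodicTaskCancellationError(ErrorMixinSketch, Exception):
    msg_template = "Could not cancel task '{task_name}'"


err = PeriodicTaskCancellationError(task_name="cleanup")
assert str(err) == "Could not cancel task 'cleanup'"
```
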
diff --git a/packages/service-library/src/servicelib/utils_meta.py b/packages/service-library/src/servicelib/utils_meta.py index 46fa78dd83e..6ee48fd4d56 100644 --- a/packages/service-library/src/servicelib/utils_meta.py +++ b/packages/service-library/src/servicelib/utils_meta.py @@ -6,7 +6,7 @@ from models_library.basic_types import VersionStr from packaging.version import Version -from pydantic import parse_obj_as +from pydantic import TypeAdapter class PackageInfo: @@ -40,7 +40,7 @@ def version(self) -> Version: @property def __version__(self) -> VersionStr: - return parse_obj_as(VersionStr, self._distribution.version) + return TypeAdapter(VersionStr).validate_python(self._distribution.version) @property def api_prefix_path_tag(self) -> str: diff --git a/packages/service-library/src/servicelib/utils_secrets.py b/packages/service-library/src/servicelib/utils_secrets.py index 7b74a491080..67c440ce044 100644 --- a/packages/service-library/src/servicelib/utils_secrets.py +++ b/packages/service-library/src/servicelib/utils_secrets.py @@ -2,7 +2,7 @@ import string from typing import Final -from pydantic import StrictInt, validate_arguments +from pydantic import StrictInt, validate_call MIN_PASSWORD_LENGTH = 30 _SAFE_SYMBOLS = "!$%*+,-.:=?@^_~" # avoid issues with parsing, espapes etc @@ -48,7 +48,7 @@ def are_secrets_equal(got: str, expected: str) -> bool: return secrets.compare_digest(got.encode("utf8"), expected.encode("utf8")) -@validate_arguments +@validate_call def secure_randint(start: StrictInt, end: StrictInt) -> int: """Generate a random integer between start (inclusive) and end (exclusive).""" if start >= end: diff --git a/packages/service-library/tests/aiohttp/long_running_tasks/conftest.py b/packages/service-library/tests/aiohttp/long_running_tasks/conftest.py index 68d06d15278..987a68a4036 100644 --- a/packages/service-library/tests/aiohttp/long_running_tasks/conftest.py +++ b/packages/service-library/tests/aiohttp/long_running_tasks/conftest.py @@ -9,7 +9,7 @@ from aiohttp import web from aiohttp.test_utils import TestClient from faker import Faker -from pydantic import BaseModel, parse_obj_as +from pydantic import BaseModel, TypeAdapter from pytest_simcore.helpers.assert_checks import assert_status from servicelib.aiohttp import long_running_tasks, status from servicelib.aiohttp.long_running_tasks.server import TaskId @@ -92,7 +92,7 @@ async def _caller(client: TestClient, **query_kwargs) -> TaskId: data, error = await assert_status(resp, status.HTTP_202_ACCEPTED) assert data assert not error - task_get = parse_obj_as(long_running_tasks.server.TaskGet, data) + task_get = TypeAdapter(long_running_tasks.server.TaskGet).validate_python(data) return task_get.task_id return _caller @@ -122,7 +122,7 @@ async def _waiter( data, error = await assert_status(result, status.HTTP_200_OK) assert data assert not error - task_status = long_running_tasks.server.TaskStatus.parse_obj(data) + task_status = long_running_tasks.server.TaskStatus.model_validate(data) assert task_status assert task_status.done diff --git a/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks.py b/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks.py index afd9e8f4fde..7907f092c24 100644 --- a/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks.py +++ b/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks.py @@ -18,7 +18,7 @@ import pytest from aiohttp import web from aiohttp.test_utils import 
TestClient -from pydantic import parse_obj_as +from pydantic import TypeAdapter from pytest_simcore.helpers.assert_checks import assert_status from servicelib.aiohttp import long_running_tasks, status from servicelib.aiohttp.long_running_tasks.server import TaskGet, TaskId @@ -75,12 +75,12 @@ async def test_workflow( data, error = await assert_status(result, status.HTTP_200_OK) assert data assert not error - task_status = long_running_tasks.server.TaskStatus.parse_obj(data) + task_status = long_running_tasks.server.TaskStatus.model_validate(data) assert task_status progress_updates.append( (task_status.task_progress.message, task_status.task_progress.percent) ) - print(f"<-- received task status: {task_status.json(indent=2)}") + print(f"<-- received task status: {task_status.model_dump_json(indent=2)}") assert task_status.done, "task incomplete" print( f"-- waiting for task status completed successfully: {json.dumps(attempt.retry_state.retry_object.statistics, indent=2)}" @@ -216,7 +216,7 @@ async def test_list_tasks( result = await client.get(f"{list_url}") data, error = await assert_status(result, status.HTTP_200_OK) assert not error - list_of_tasks = parse_obj_as(list[TaskGet], data) + list_of_tasks = TypeAdapter(list[TaskGet]).validate_python(data) assert len(list_of_tasks) == NUM_TASKS # the task name is properly formatted @@ -235,5 +235,5 @@ async def test_list_tasks( result = await client.get(f"{list_url}") data, error = await assert_status(result, status.HTTP_200_OK) assert not error - list_of_tasks = parse_obj_as(list[TaskGet], data) + list_of_tasks = TypeAdapter(list[TaskGet]).validate_python(data) assert len(list_of_tasks) == NUM_TASKS - (task_index + 1) diff --git a/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks_with_task_context.py b/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks_with_task_context.py index 941ae31359d..5671eda108f 100644 --- a/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks_with_task_context.py +++ b/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks_with_task_context.py @@ -18,7 +18,7 @@ import pytest from aiohttp import web from aiohttp.test_utils import TestClient -from pydantic import create_model, parse_obj_as +from pydantic import TypeAdapter, create_model from pytest_simcore.helpers.assert_checks import assert_status from servicelib.aiohttp import long_running_tasks, status from servicelib.aiohttp.long_running_tasks._server import ( @@ -108,7 +108,7 @@ async def test_list_tasks( result = await client_with_task_context.get(f"{list_url}") data, error = await assert_status(result, status.HTTP_200_OK) assert not error - list_of_tasks = parse_obj_as(list[TaskGet], data) + list_of_tasks = TypeAdapter(list[TaskGet]).validate_python(data) assert len(list_of_tasks) == 0 # the list should be full if we pass the expected context @@ -117,7 +117,7 @@ async def test_list_tasks( ) data, error = await assert_status(result, status.HTTP_200_OK) assert not error - list_of_tasks = parse_obj_as(list[TaskGet], data) + list_of_tasks = TypeAdapter(list[TaskGet]).validate_python(data) assert len(list_of_tasks) == 1 diff --git a/packages/service-library/tests/aiohttp/test_docker_utils.py b/packages/service-library/tests/aiohttp/test_docker_utils.py index 890ffdc588b..bcd2129abd2 100644 --- a/packages/service-library/tests/aiohttp/test_docker_utils.py +++ b/packages/service-library/tests/aiohttp/test_docker_utils.py @@ -11,7 +11,7 @@ from faker 
import Faker from models_library.docker import DockerGenericTag from models_library.progress_bar import ProgressReport -from pydantic import parse_obj_as +from pydantic import TypeAdapter from pytest_mock import MockerFixture from servicelib import progress_bar from servicelib.aiohttp.docker_utils import retrieve_image_layer_information @@ -42,8 +42,7 @@ async def test_retrieve_image_layer_information( if "sha256" in service_tag: image_name = f"{service_repo}@{service_tag}" await remove_images_from_host([image_name]) - docker_image = parse_obj_as( - DockerGenericTag, + docker_image = TypeAdapter(DockerGenericTag).validate_python( f"{registry_settings.REGISTRY_URL}/{osparc_service['image']['name']}:{osparc_service['image']['tag']}", ) layer_information = await retrieve_image_layer_information( @@ -97,13 +96,13 @@ def _assert_progress_report_values( # check first progress assert mocked_progress_cb.call_args_list[0].args[0].dict( exclude={"message"} - ) == ProgressReport(actual_value=0, total=total, unit="Byte").dict( + ) == ProgressReport(actual_value=0, total=total, unit="Byte").model_dump( exclude={"message"} ) # check last progress assert mocked_progress_cb.call_args_list[-1].args[0].dict( exclude={"message"} - ) == ProgressReport(actual_value=total, total=total, unit="Byte").dict( + ) == ProgressReport(actual_value=total, total=total, unit="Byte").model_dump( exclude={"message"} ) diff --git a/packages/service-library/tests/aiohttp/test_requests_validation.py b/packages/service-library/tests/aiohttp/test_requests_validation.py index 08e2f07bfbe..4e1b4f4e2e7 100644 --- a/packages/service-library/tests/aiohttp/test_requests_validation.py +++ b/packages/service-library/tests/aiohttp/test_requests_validation.py @@ -11,7 +11,7 @@ from aiohttp.test_utils import TestClient from faker import Faker from models_library.utils.json_serialization import json_dumps -from pydantic import BaseModel, Extra, Field +from pydantic import BaseModel, ConfigDict, Field from servicelib.aiohttp import status from servicelib.aiohttp.requests_validation import ( parse_request_body_as, @@ -41,9 +41,9 @@ def create_fake(cls, faker: Faker): class MyRequestPathParams(BaseModel): project_uuid: UUID - - class Config: - extra = Extra.forbid + model_config = ConfigDict( + extra="forbid", + ) @classmethod def create_fake(cls, faker: Faker): @@ -55,7 +55,7 @@ class MyRequestQueryParams(BaseModel): label: str def as_params(self, **kwargs) -> dict[str, str]: - data = self.dict(**kwargs) + data = self.model_dump(**kwargs) return {k: f"{v}" for k, v in data.items()} @classmethod @@ -66,9 +66,9 @@ def create_fake(cls, faker: Faker): class MyRequestHeadersParams(BaseModel): user_agent: str = Field(alias="X-Simcore-User-Agent") optional_header: str | None = Field(default=None, alias="X-Simcore-Optional-Header") - - class Config: - allow_population_by_field_name = False + model_config = ConfigDict( + populate_by_name=False, + ) @classmethod def create_fake(cls, faker: Faker): @@ -111,7 +111,9 @@ def client(event_loop, aiohttp_client: Callable, faker: Faker) -> TestClient: async def _handler(request: web.Request) -> web.Response: # --------- UNDER TEST ------- # NOTE: app context does NOT need to be validated everytime! 
- context = MyRequestContext.parse_obj({**dict(request.app), **dict(request)}) + context = MyRequestContext.model_validate( + {**dict(request.app), **dict(request)} + ) path_params = parse_request_path_parameters_as( MyRequestPathParams, request, use_enveloped_error_v1=False @@ -129,11 +131,11 @@ async def _handler(request: web.Request) -> web.Response: return web.json_response( { - "parameters": path_params.dict(), - "queries": query_params.dict(), - "body": body.dict(), - "context": context.dict(), - "headers": headers_params.dict(), + "parameters": path_params.model_dump(), + "queries": query_params.model_dump(), + "body": body.model_dump(), + "context": context.model_dump(), + "headers": headers_params.model_dump(), }, dumps=json_dumps, ) @@ -194,21 +196,21 @@ async def test_parse_request_as( r = await client.get( f"/projects/{path_params.project_uuid}", params=query_params.as_params(), - json=body.dict(), - headers=headers_params.dict(by_alias=True), + json=body.model_dump(), + headers=headers_params.model_dump(by_alias=True), ) assert r.status == status.HTTP_200_OK, f"{await r.text()}" got = await r.json() - assert got["parameters"] == jsonable_encoder(path_params.dict()) - assert got["queries"] == jsonable_encoder(query_params.dict()) - assert got["body"] == body.dict() + assert got["parameters"] == jsonable_encoder(path_params.model_dump()) + assert got["queries"] == jsonable_encoder(query_params.model_dump()) + assert got["body"] == body.model_dump() assert got["context"] == { "secret": client.app[APP_SECRET_KEY], "user_id": 42, } - assert got["headers"] == jsonable_encoder(headers_params.dict()) + assert got["headers"] == jsonable_encoder(headers_params.model_dump()) async def test_parse_request_with_invalid_path_params( @@ -221,8 +223,8 @@ async def test_parse_request_with_invalid_path_params( r = await client.get( "/projects/invalid-uuid", params=query_params.as_params(), - json=body.dict(), - headers=headers_params.dict(by_alias=True), + json=body.model_dump(), + headers=headers_params.model_dump(by_alias=True), ) assert r.status == status.HTTP_422_UNPROCESSABLE_ENTITY, f"{await r.text()}" @@ -234,8 +236,8 @@ async def test_parse_request_with_invalid_path_params( "details": [ { "loc": "project_uuid", - "msg": "value is not a valid uuid", - "type": "type_error.uuid", + "msg": "Input should be a valid UUID, invalid character: expected an optional prefix of `urn:uuid:` followed by [0-9a-fA-F-], found `i` at 1", + "type": "uuid_parsing", } ], } @@ -252,8 +254,8 @@ async def test_parse_request_with_invalid_query_params( r = await client.get( f"/projects/{path_params.project_uuid}", params={}, - json=body.dict(), - headers=headers_params.dict(by_alias=True), + json=body.model_dump(), + headers=headers_params.model_dump(by_alias=True), ) assert r.status == status.HTTP_422_UNPROCESSABLE_ENTITY, f"{await r.text()}" @@ -265,8 +267,8 @@ async def test_parse_request_with_invalid_query_params( "details": [ { "loc": "label", - "msg": "field required", - "type": "value_error.missing", + "msg": "Field required", + "type": "missing", } ], } @@ -284,7 +286,7 @@ async def test_parse_request_with_invalid_body( f"/projects/{path_params.project_uuid}", params=query_params.as_params(), json={"invalid": "body"}, - headers=headers_params.dict(by_alias=True), + headers=headers_params.model_dump(by_alias=True), ) assert r.status == status.HTTP_422_UNPROCESSABLE_ENTITY, f"{await r.text()}" @@ -298,13 +300,13 @@ async def test_parse_request_with_invalid_body( "details": [ { "loc": "x", - "msg": "field 
required", - "type": "value_error.missing", + "msg": "Field required", + "type": "missing", }, { "loc": "z", - "msg": "field required", - "type": "value_error.missing", + "msg": "Field required", + "type": "missing", }, ], } @@ -322,7 +324,7 @@ async def test_parse_request_with_invalid_json_body( f"/projects/{path_params.project_uuid}", params=query_params.as_params(), data=b"[ 1 2, 3 'broken-json' ]", - headers=headers_params.dict(by_alias=True), + headers=headers_params.model_dump(by_alias=True), ) body = await r.text() @@ -340,8 +342,8 @@ async def test_parse_request_with_invalid_headers_params( r = await client.get( f"/projects/{path_params.project_uuid}", params=query_params.as_params(), - json=body.dict(), - headers=headers_params.dict(), # we pass the wrong names + json=body.model_dump(), + headers=headers_params.model_dump(), # we pass the wrong names ) assert r.status == status.HTTP_422_UNPROCESSABLE_ENTITY, f"{await r.text()}" @@ -353,8 +355,8 @@ async def test_parse_request_with_invalid_headers_params( "details": [ { "loc": "X-Simcore-User-Agent", - "msg": "field required", - "type": "value_error.missing", + "msg": "Field required", + "type": "missing", } ], } diff --git a/packages/service-library/tests/deferred_tasks/example_app.py b/packages/service-library/tests/deferred_tasks/example_app.py index 75850fddc2e..1962d0b4232 100644 --- a/packages/service-library/tests/deferred_tasks/example_app.py +++ b/packages/service-library/tests/deferred_tasks/example_app.py @@ -107,8 +107,8 @@ async def _commands_handler( ) -> Any: """Handles all commands send by remote party""" if command == "init-context": - context.redis_settings = RedisSettings.parse_raw(payload["redis"]) - context.rabbit_settings = RabbitSettings.parse_raw(payload["rabbit"]) + context.redis_settings = RedisSettings.model_validate_json(payload["redis"]) + context.rabbit_settings = RabbitSettings.model_validate_json(payload["rabbit"]) # using the same db as the deferred tasks with different keys context.in_memory_lists = InMemoryLists(context.redis_settings, port) diff --git a/packages/service-library/tests/deferred_tasks/test__redis_task_tracker.py b/packages/service-library/tests/deferred_tasks/test__redis_task_tracker.py index 3ec3bde01ed..366759e22d3 100644 --- a/packages/service-library/tests/deferred_tasks/test__redis_task_tracker.py +++ b/packages/service-library/tests/deferred_tasks/test__redis_task_tracker.py @@ -5,7 +5,7 @@ from datetime import timedelta import pytest -from pydantic import parse_obj_as +from pydantic import TypeAdapter from servicelib.deferred_tasks._models import TaskUID from servicelib.deferred_tasks._redis_task_tracker import RedisTaskTracker from servicelib.deferred_tasks._task_schedule import TaskScheduleModel, TaskState @@ -19,8 +19,7 @@ @pytest.fixture def task_schedule() -> TaskScheduleModel: - return parse_obj_as( - TaskScheduleModel, + return TypeAdapter(TaskScheduleModel).validate_python( { "timeout": timedelta(seconds=1), "execution_attempts": 1, diff --git a/packages/service-library/tests/deferred_tasks/test_deferred_tasks.py b/packages/service-library/tests/deferred_tasks/test_deferred_tasks.py index 0ea55a62eee..9ea22f87ed1 100644 --- a/packages/service-library/tests/deferred_tasks/test_deferred_tasks.py +++ b/packages/service-library/tests/deferred_tasks/test_deferred_tasks.py @@ -16,6 +16,8 @@ import psutil import pytest from aiohttp.test_utils import unused_port +from common_library.serialization import model_dump_with_secrets +from models_library.utils.json_serialization 
import json_dumps from pydantic import NonNegativeFloat, NonNegativeInt from pytest_mock import MockerFixture from servicelib import redis as servicelib_redis @@ -24,7 +26,6 @@ from servicelib.sequences_utils import partition_gen from settings_library.rabbit import RabbitSettings from settings_library.redis import RedisSettings -from settings_library.utils_encoders import create_json_encoder_wo_secrets from tenacity.asyncio import AsyncRetrying from tenacity.retry import retry_if_exception_type from tenacity.stop import stop_after_delay @@ -123,7 +124,6 @@ async def _tcp_command( def _get_serialization_options() -> dict[str, Any]: return { - "encoder": create_json_encoder_wo_secrets(RabbitSettings), "exclude_defaults": True, "exclude_none": True, "exclude_unset": True, @@ -158,8 +158,20 @@ async def start(self) -> None: response = await _tcp_command( "init-context", { - "rabbit": self.rabbit_service.json(**_get_serialization_options()), - "redis": self.redis_service.json(**_get_serialization_options()), + "rabbit": json_dumps( + model_dump_with_secrets( + self.rabbit_service, + show_secrets=True, + **_get_serialization_options(), + ) + ), + "redis": json_dumps( + model_dump_with_secrets( + self.redis_service, + show_secrets=True, + **_get_serialization_options(), + ) + ), "max-workers": self.max_workers, }, port=self.remote_process.port, diff --git a/packages/service-library/tests/fastapi/long_running_tasks/test_long_running_tasks.py b/packages/service-library/tests/fastapi/long_running_tasks/test_long_running_tasks.py index bd55b44d498..52527f138d9 100644 --- a/packages/service-library/tests/fastapi/long_running_tasks/test_long_running_tasks.py +++ b/packages/service-library/tests/fastapi/long_running_tasks/test_long_running_tasks.py @@ -18,7 +18,7 @@ from asgi_lifespan import LifespanManager from fastapi import APIRouter, Depends, FastAPI, status from httpx import AsyncClient -from pydantic import parse_obj_as +from pydantic import TypeAdapter from servicelib.fastapi import long_running_tasks from servicelib.long_running_tasks._models import TaskGet, TaskId from servicelib.long_running_tasks._task import TaskContext @@ -94,7 +94,9 @@ async def _caller(app: FastAPI, client: AsyncClient, **query_kwargs) -> TaskId: ) resp = await client.post(f"{url}") assert resp.status_code == status.HTTP_202_ACCEPTED - task_id = parse_obj_as(long_running_tasks.server.TaskId, resp.json()) + task_id = TypeAdapter(long_running_tasks.server.TaskId).validate_python( + resp.json() + ) return task_id return _caller @@ -122,7 +124,7 @@ async def _waiter( with attempt: result = await client.get(f"{status_url}") assert result.status_code == status.HTTP_200_OK - task_status = long_running_tasks.server.TaskStatus.parse_obj( + task_status = long_running_tasks.server.TaskStatus.model_validate( result.json() ) assert task_status @@ -149,12 +151,14 @@ async def test_workflow( with attempt: result = await client.get(f"{status_url}") assert result.status_code == status.HTTP_200_OK - task_status = long_running_tasks.server.TaskStatus.parse_obj(result.json()) + task_status = long_running_tasks.server.TaskStatus.model_validate( + result.json() + ) assert task_status progress_updates.append( (task_status.task_progress.message, task_status.task_progress.percent) ) - print(f"<-- received task status: {task_status.json(indent=2)}") + print(f"<-- received task status: {task_status.model_dump_json(indent=2)}") assert task_status.done, "task incomplete" print( f"-- waiting for task status completed successfully: 
{json.dumps(attempt.retry_state.retry_object.statistics, indent=2)}" @@ -179,7 +183,7 @@ async def test_workflow( result = await client.get(f"{result_url}") # NOTE: this is DIFFERENT than with aiohttp where we return the real result assert result.status_code == status.HTTP_200_OK - task_result = long_running_tasks.server.TaskResult.parse_obj(result.json()) + task_result = long_running_tasks.server.TaskResult.model_validate(result.json()) assert not task_result.error assert task_result.result == [f"{x}" for x in range(10)] # getting the result again should raise a 404 @@ -218,7 +222,7 @@ async def test_failing_task_returns_error( result_url = app.url_path_for("get_task_result", task_id=task_id) result = await client.get(f"{result_url}") assert result.status_code == status.HTTP_200_OK - task_result = long_running_tasks.server.TaskResult.parse_obj(result.json()) + task_result = long_running_tasks.server.TaskResult.model_validate(result.json()) assert not task_result.result assert task_result.error @@ -274,7 +278,7 @@ async def test_list_tasks_empty_list(app: FastAPI, client: AsyncClient): list_url = app.url_path_for("list_tasks") result = await client.get(f"{list_url}") assert result.status_code == status.HTTP_200_OK - list_of_tasks = parse_obj_as(list[TaskGet], result.json()) + list_of_tasks = TypeAdapter(list[TaskGet]).validate_python(result.json()) assert list_of_tasks == [] @@ -296,7 +300,7 @@ async def test_list_tasks( list_url = app.url_path_for("list_tasks") result = await client.get(f"{list_url}") assert result.status_code == status.HTTP_200_OK - list_of_tasks = parse_obj_as(list[TaskGet], result.json()) + list_of_tasks = TypeAdapter(list[TaskGet]).validate_python(result.json()) assert len(list_of_tasks) == NUM_TASKS # now wait for them to finish @@ -311,5 +315,5 @@ async def test_list_tasks( # the list shall go down one by one result = await client.get(f"{list_url}") assert result.status_code == status.HTTP_200_OK - list_of_tasks = parse_obj_as(list[TaskGet], result.json()) + list_of_tasks = TypeAdapter(list[TaskGet]).validate_python(result.json()) assert len(list_of_tasks) == NUM_TASKS - (task_index + 1) diff --git a/packages/service-library/tests/fastapi/long_running_tasks/test_long_running_tasks_context_manager.py b/packages/service-library/tests/fastapi/long_running_tasks/test_long_running_tasks_context_manager.py index 9f15184b052..0dc440bf33e 100644 --- a/packages/service-library/tests/fastapi/long_running_tasks/test_long_running_tasks_context_manager.py +++ b/packages/service-library/tests/fastapi/long_running_tasks/test_long_running_tasks_context_manager.py @@ -4,11 +4,12 @@ import asyncio from typing import AsyncIterable, Final +from common_library.pydantic_networks_extension import AnyHttpUrlLegacy import pytest from asgi_lifespan import LifespanManager from fastapi import APIRouter, Depends, FastAPI, status from httpx import AsyncClient -from pydantic import AnyHttpUrl, PositiveFloat, parse_obj_as +from pydantic import PositiveFloat, TypeAdapter from servicelib.fastapi.long_running_tasks._context_manager import _ProgressManager from servicelib.fastapi.long_running_tasks.client import ( Client, @@ -90,7 +91,7 @@ async def bg_task_app( @pytest.fixture def mock_task_id() -> TaskId: - return parse_obj_as(TaskId, "fake_task_id") + return TypeAdapter(TaskId).validate_python("fake_task_id") async def test_task_result( @@ -100,7 +101,7 @@ async def test_task_result( assert result.status_code == status.HTTP_200_OK, result.text task_id = result.json() - url = 
parse_obj_as(AnyHttpUrl, "http://backgroud.testserver.io") + url = TypeAdapter(AnyHttpUrlLegacy).validate_python("http://backgroud.testserver.io") client = Client(app=bg_task_app, async_client=async_client, base_url=url) async with periodic_task_result( client, @@ -120,7 +121,7 @@ async def test_task_result_times_out( assert result.status_code == status.HTTP_200_OK, result.text task_id = result.json() - url = parse_obj_as(AnyHttpUrl, "http://backgroud.testserver.io") + url = TypeAdapter(AnyHttpUrlLegacy).validate_python("http://backgroud.testserver.io") client = Client(app=bg_task_app, async_client=async_client, base_url=url) timeout = TASK_SLEEP_INTERVAL / 10 with pytest.raises(TaskClientTimeoutError) as exec_info: @@ -146,7 +147,7 @@ async def test_task_result_task_result_is_an_error( assert result.status_code == status.HTTP_200_OK, result.text task_id = result.json() - url = parse_obj_as(AnyHttpUrl, "http://backgroud.testserver.io") + url = TypeAdapter(AnyHttpUrlLegacy).validate_python("http://backgroud.testserver.io") client = Client(app=bg_task_app, async_client=async_client, base_url=url) with pytest.raises(TaskClientResultError) as exec_info: async with periodic_task_result( @@ -185,13 +186,13 @@ async def progress_update( assert received == ("", None) for _ in range(repeat): - await progress_updater.update(mock_task_id, percent=ProgressPercent(0.0)) + await progress_updater.update(mock_task_id, percent=TypeAdapter(ProgressPercent).validate_python(0.0)) assert counter == 2 assert received == ("", 0.0) for _ in range(repeat): await progress_updater.update( - mock_task_id, percent=ProgressPercent(1.0), message="done" + mock_task_id, percent=TypeAdapter(ProgressPercent).validate_python(1.0), message="done" ) assert counter == 3 assert received == ("done", 1.0) diff --git a/packages/service-library/tests/fastapi/test_docker_utils.py b/packages/service-library/tests/fastapi/test_docker_utils.py index 4db0db99bd0..f6d78066c97 100644 --- a/packages/service-library/tests/fastapi/test_docker_utils.py +++ b/packages/service-library/tests/fastapi/test_docker_utils.py @@ -12,7 +12,7 @@ from faker import Faker from models_library.docker import DockerGenericTag from models_library.progress_bar import ProgressReport -from pydantic import ByteSize, parse_obj_as +from pydantic import ByteSize, TypeAdapter from pytest_mock import MockerFixture from servicelib import progress_bar from servicelib.docker_utils import pull_image @@ -46,8 +46,7 @@ async def test_retrieve_image_layer_information( if "sha256" in service_tag: image_name = f"{service_repo}@{service_tag}" await remove_images_from_host([image_name]) - docker_image = parse_obj_as( - DockerGenericTag, + docker_image = TypeAdapter(DockerGenericTag).validate_python( f"{registry_settings.REGISTRY_URL}/{osparc_service['image']['name']}:{osparc_service['image']['tag']}", ) layer_information = await retrieve_image_layer_information( @@ -103,13 +102,13 @@ def _assert_progress_report_values( # check first progress assert mocked_progress_cb.call_args_list[0].args[0].dict( exclude={"message"} - ) == ProgressReport(actual_value=0, total=total, unit="Byte").dict( + ) == ProgressReport(actual_value=0, total=total, unit="Byte").model_dump( exclude={"message"} ) # check last progress assert mocked_progress_cb.call_args_list[-1].args[0].dict( exclude={"message"} - ) == ProgressReport(actual_value=total, total=total, unit="Byte").dict( + ) == ProgressReport(actual_value=total, total=total, unit="Byte").model_dump( exclude={"message"} ) @@ -202,7 +201,7 @@ async 
def test_pull_image_without_layer_information( assert layer_information print(f"{image=} has {layer_information.layers_total_size=}") - fake_number_of_steps = parse_obj_as(ByteSize, "200MiB") + fake_number_of_steps = TypeAdapter(ByteSize).validate_python("200MiB") assert fake_number_of_steps > layer_information.layers_total_size async with progress_bar.ProgressBarData( num_steps=fake_number_of_steps, diff --git a/packages/service-library/tests/fastapi/test_exceptions_utils.py b/packages/service-library/tests/fastapi/test_exceptions_utils.py index 845043f3405..cfe7fbde0e8 100644 --- a/packages/service-library/tests/fastapi/test_exceptions_utils.py +++ b/packages/service-library/tests/fastapi/test_exceptions_utils.py @@ -10,7 +10,7 @@ from fastapi import FastAPI, HTTPException from httpx import AsyncClient from models_library.api_schemas__common.errors import DefaultApiError -from pydantic import parse_raw_as +from pydantic import TypeAdapter from servicelib.fastapi.exceptions_utils import ( handle_errors_as_500, http_exception_as_json_response, @@ -66,7 +66,7 @@ async def test_http_errors_respond_with_error_model( response = await client.post(f"/error/{code}") assert response.status_code == code - error = parse_raw_as(DefaultApiError, response.text) + error = TypeAdapter(DefaultApiError).validate_json(response.text) assert error.detail == f"test {code}" assert error.name @@ -79,4 +79,4 @@ async def test_non_http_error_handling( response = await client.post(f"/raise/{code}") print(response) - error = parse_raw_as(DefaultApiError, response.text) + error = TypeAdapter(DefaultApiError).validate_json(response.text) diff --git a/packages/service-library/tests/fastapi/test_http_client_thin.py b/packages/service-library/tests/fastapi/test_http_client_thin.py index f98de720c33..7bd96b25eee 100644 --- a/packages/service-library/tests/fastapi/test_http_client_thin.py +++ b/packages/service-library/tests/fastapi/test_http_client_thin.py @@ -3,6 +3,7 @@ import logging from collections.abc import AsyncIterable, Iterable from typing import Final +from common_library.pydantic_networks_extension import AnyHttpUrlLegacy import arrow import pytest @@ -15,7 +16,7 @@ TransportError, codes, ) -from pydantic import AnyHttpUrl, parse_obj_as +from pydantic import AnyHttpUrl, TypeAdapter from respx import MockRouter from servicelib.fastapi.http_client_thin import ( BaseThinClient, @@ -76,12 +77,14 @@ async def thick_client(request_timeout: int) -> AsyncIterable[FakeThickClient]: @pytest.fixture -def test_url() -> AnyHttpUrl: - return parse_obj_as(AnyHttpUrl, "http://missing-host:1111") +def test_url() -> str: + url = TypeAdapter(AnyHttpUrlLegacy).validate_python("http://missing-host:1111") + return f"{url}" async def test_connection_error( - thick_client: FakeThickClient, test_url: AnyHttpUrl + thick_client: FakeThickClient, + test_url: str, ) -> None: with pytest.raises(ClientHttpError) as exe_info: await thick_client.get_provided_url(test_url) @@ -92,7 +95,7 @@ async def test_retry_on_errors( request_timeout: int, - test_url: AnyHttpUrl, + test_url: str, caplog_info_level: pytest.LogCaptureFixture, ) -> None: client = FakeThickClient(total_retry_interval=request_timeout) @@ -108,7 +111,7 @@ async def test_retry_on_errors_by_error_type( error_class: type[RequestError], caplog_info_level: pytest.LogCaptureFixture, request_timeout: int, - test_url: AnyHttpUrl, + test_url: str, ) -> None: class ATestClient(BaseThinClient): # pylint: disable=no-self-use @@ -177,7 +180,7 @@ async def 
public_method_no_annotation(self): async def test_expect_state_decorator( - test_url: AnyHttpUrl, + test_url: str, respx_mock: MockRouter, request_timeout: int, ) -> None: diff --git a/packages/service-library/tests/fastapi/test_openapi.py b/packages/service-library/tests/fastapi/test_openapi.py index 0edd438c73a..54f7e017799 100644 --- a/packages/service-library/tests/fastapi/test_openapi.py +++ b/packages/service-library/tests/fastapi/test_openapi.py @@ -44,7 +44,7 @@ def test_exclusive_min_openapi_issue(app: FastAPI): def test_overriding_openapi_method(app: FastAPI): assert not hasattr(app, "_original_openapi") - assert app.openapi.__doc__ is None + # assert app.openapi.__doc__ is None # PC: why was this set to check that it is None? It comes from the base FastAPI application, which now provides some docs override_fastapi_openapi_method(app) diff --git a/packages/service-library/tests/fastapi/test_rabbitmq.py b/packages/service-library/tests/fastapi/test_rabbitmq.py index 9c94cfa0766..b41a94097f2 100644 --- a/packages/service-library/tests/fastapi/test_rabbitmq.py +++ b/packages/service-library/tests/fastapi/test_rabbitmq.py @@ -132,6 +132,6 @@ async def test_post_message( f"--> checking for message in rabbit exchange {rabbit_message.channel_name}, {attempt.retry_state.retry_object.statistics}" ) mocked_message_handler.assert_called_once_with( - rabbit_message.json().encode() + rabbit_message.model_dump_json().encode() ) print("... message received") diff --git a/packages/service-library/tests/rabbitmq/test_rabbitmq_connection.py b/packages/service-library/tests/rabbitmq/test_rabbitmq_connection.py index 3019b07d6ab..ba7576e3027 100644 --- a/packages/service-library/tests/rabbitmq/test_rabbitmq_connection.py +++ b/packages/service-library/tests/rabbitmq/test_rabbitmq_connection.py @@ -86,13 +86,13 @@ async def test_rabbit_client_with_paused_container( await rabbit_client.publish(exchange_name, message) -def _get_rabbitmq_api_params(rabbit_service: RabbitSettings) -> dict[str, str]: +def _get_rabbitmq_api_params(rabbit_service: RabbitSettings) -> dict[str, Any]: return { "scheme": "http", - "user": rabbit_service.RABBIT_USER, + "username": rabbit_service.RABBIT_USER, "password": rabbit_service.RABBIT_PASSWORD.get_secret_value(), "host": rabbit_service.RABBIT_HOST, - "port": "15672", + "port": 15672, } diff --git a/packages/service-library/tests/rabbitmq/test_rabbitmq_rpc.py b/packages/service-library/tests/rabbitmq/test_rabbitmq_rpc.py index e192afc611e..46588de6e87 100644 --- a/packages/service-library/tests/rabbitmq/test_rabbitmq_rpc.py +++ b/packages/service-library/tests/rabbitmq/test_rabbitmq_rpc.py @@ -350,11 +350,12 @@ async def _a_handler() -> None: pass if expect_fail: - with pytest.raises(ValidationError) as exec_info: + with pytest.raises( + ValidationError, match="String should have at most 255 characters" + ): await rpc_server.register_handler( RPCNamespace("a"), RPCMethodName(handler_name), _a_handler ) - assert "ensure this value has at most 255 characters" in f"{exec_info.value}" else: await rpc_server.register_handler( RPCNamespace("a"), RPCMethodName(handler_name), _a_handler diff --git a/packages/service-library/tests/rabbitmq/test_rabbitmq_utils.py b/packages/service-library/tests/rabbitmq/test_rabbitmq_utils.py index b07f8e8cb8d..2615a92ac56 100644 --- a/packages/service-library/tests/rabbitmq/test_rabbitmq_utils.py +++ b/packages/service-library/tests/rabbitmq/test_rabbitmq_utils.py @@ -26,18 +26,18 @@ def test_rpc_namespace_sorts_elements(): def 
test_rpc_namespace_too_long(): with pytest.raises(ValidationError) as exec_info: RPCNamespace.from_entries({f"test{i}": f"test{i}" for i in range(20)}) - assert "ensure this value has at most 252 characters" in f"{exec_info.value}" + assert "String should have at most 252 characters" in f"{exec_info.value}" @pytest.mark.no_cleanup_check_rabbitmq_server_has_no_errors() # no rabbitmq instance running def test_rpc_namespace_too_short(): with pytest.raises(ValidationError) as exec_info: RPCNamespace.from_entries({}) - assert "ensure this value has at least 1 characters" in f"{exec_info.value}" + assert "String should have at least 1 character" in f"{exec_info.value}" @pytest.mark.no_cleanup_check_rabbitmq_server_has_no_errors() # no rabbitmq instance running def test_rpc_namespace_invalid_symbols(): with pytest.raises(ValidationError) as exec_info: RPCNamespace.from_entries({"test": "@"}) - assert "string does not match regex" in f"{exec_info.value}" + assert "String should match pattern" in f"{exec_info.value}" diff --git a/packages/service-library/tests/test_archiving_utils.py b/packages/service-library/tests/test_archiving_utils.py index f6886ea509a..bb6f2b486c4 100644 --- a/packages/service-library/tests/test_archiving_utils.py +++ b/packages/service-library/tests/test_archiving_utils.py @@ -18,7 +18,7 @@ import pytest from faker import Faker -from pydantic import ByteSize, parse_obj_as +from pydantic import ByteSize, TypeAdapter from pytest_benchmark.plugin import BenchmarkFixture from servicelib import archiving_utils from servicelib.archiving_utils import ArchiveError, archive_dir, unarchive_dir @@ -566,7 +566,8 @@ async def _archive_dir_performance( @pytest.mark.skip(reason="manual testing") @pytest.mark.parametrize( - "compress, file_size, num_files", [(False, parse_obj_as(ByteSize, "1Mib"), 10000)] + "compress, file_size, num_files", + [(False, TypeAdapter(ByteSize).validate_python("1Mib"), 10000)], ) def test_archive_dir_performance( benchmark: BenchmarkFixture, diff --git a/packages/settings-library/requirements/_base.in b/packages/settings-library/requirements/_base.in index 9672f593a7f..91f4dd23b04 100644 --- a/packages/settings-library/requirements/_base.in +++ b/packages/settings-library/requirements/_base.in @@ -2,12 +2,10 @@ # Specifies third-party dependencies for 'settings-library' # --constraint ../../../requirements/constraints.txt - -# intra-repo required dependencies --requirement ../../../packages/common-library/requirements/_base.in -pydantic>=1.9 - +pydantic +pydantic-settings # extra rich diff --git a/packages/settings-library/requirements/_base.txt b/packages/settings-library/requirements/_base.txt index 900c4fea2aa..fc4a246dc95 100644 --- a/packages/settings-library/requirements/_base.txt +++ b/packages/settings-library/requirements/_base.txt @@ -1,15 +1,26 @@ +annotated-types==0.7.0 + # via pydantic click==8.1.7 # via typer markdown-it-py==3.0.0 # via rich mdurl==0.1.2 # via markdown-it-py -pydantic==1.10.17 +pydantic==2.9.2 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/common-library/requirements/_base.in # -r requirements/_base.in + # pydantic-settings +pydantic-core==2.23.4 + # via pydantic +pydantic-settings==2.5.2 + # via -r requirements/_base.in pygments==2.18.0 # via rich +python-dotenv==1.0.1 + # via pydantic-settings rich==13.7.1 # via # -r requirements/_base.in @@ -21,4 +32,5 @@ typer==0.12.4 
typing-extensions==4.12.2 # via # pydantic + # pydantic-core # typer diff --git a/packages/settings-library/requirements/_test.txt b/packages/settings-library/requirements/_test.txt index 1ca7d43dd3c..56bf15d9c2d 100644 --- a/packages/settings-library/requirements/_test.txt +++ b/packages/settings-library/requirements/_test.txt @@ -2,8 +2,6 @@ coverage==7.6.1 # via # -r requirements/_test.in # pytest-cov -exceptiongroup==1.2.2 - # via pytest faker==27.0.0 # via -r requirements/_test.in iniconfig==2.0.0 @@ -34,12 +32,10 @@ pytest-sugar==1.0.0 python-dateutil==2.9.0.post0 # via faker python-dotenv==1.0.1 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in six==1.16.0 # via python-dateutil termcolor==2.4.0 # via pytest-sugar -tomli==2.0.1 - # via - # coverage - # pytest diff --git a/packages/settings-library/requirements/_tools.txt b/packages/settings-library/requirements/_tools.txt index a75c5397d80..d14257822b0 100644 --- a/packages/settings-library/requirements/_tools.txt +++ b/packages/settings-library/requirements/_tools.txt @@ -67,21 +67,11 @@ ruff==0.6.1 # via -r requirements/../../../requirements/devenv.txt setuptools==73.0.1 # via pip-tools -tomli==2.0.1 - # via - # -c requirements/_test.txt - # black - # build - # mypy - # pip-tools - # pylint tomlkit==0.13.2 # via pylint typing-extensions==4.12.2 # via # -c requirements/_base.txt - # astroid - # black # mypy virtualenv==20.26.3 # via pre-commit diff --git a/packages/settings-library/requirements/ci.txt b/packages/settings-library/requirements/ci.txt index d950945b44b..aeacf04c2e5 100644 --- a/packages/settings-library/requirements/ci.txt +++ b/packages/settings-library/requirements/ci.txt @@ -11,8 +11,8 @@ --requirement _test.txt # installs this repo's packages -pytest-simcore @ ../pytest-simcore simcore-common-library @ ../common-library/ +pytest-simcore @ ../pytest-simcore # current module simcore-settings-library @ . 
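Note on the migration pattern: the requirement bumps above pull in pydantic 2.9.x together with pydantic-settings 2.5.x, and the settings-library source diffs that follow all apply the same handful of v1-to-v2 substitutions. The sketch below condenses them for reference. It is illustrative only: ExampleSettings and its fields are made-up names, not taken from this repo, but the pydantic and pydantic-settings APIs shown are the ones used throughout this PR.

from typing import Annotated, TypeAlias

from pydantic import AliasChoices, Field, StringConstraints, TypeAdapter
from pydantic_settings import BaseSettings, SettingsConfigDict

# v1: class PortInt(ConstrainedInt): gt = 0; lt = 65535
PortInt: TypeAlias = Annotated[int, Field(gt=0, lt=65535)]
# v1: class VersionTag(ConstrainedStr): regex = re.compile(r"^v\d$")
VersionTag: TypeAlias = Annotated[str, StringConstraints(pattern=r"^v\d$")]


class ExampleSettings(BaseSettings):
    # v1: `class Config: case_sensitive = True; extra = Extra.forbid`
    model_config = SettingsConfigDict(case_sensitive=True, extra="forbid")

    EXAMPLE_HOST: str = "example"
    # a plain literal validates against the Annotated alias, so the v1 idiom
    # `parse_obj_as(PortInt, 8000)` is no longer needed for defaults
    EXAMPLE_PORT: PortInt = 8000
    EXAMPLE_VTAG: VersionTag = "v1"
    # v1: Field(None, env=["EXAMPLE_CLIENT_NAME", "HOST", "HOSTNAME"])
    EXAMPLE_CLIENT_NAME: str | None = Field(
        default=None,
        validation_alias=AliasChoices("EXAMPLE_CLIENT_NAME", "HOST", "HOSTNAME"),
    )


# v1 parse_obj_as(T, v) becomes TypeAdapter(T).validate_python(v), and
# v1 .dict() / .json() become .model_dump() / .model_dump_json()
assert TypeAdapter(PortInt).validate_python(6789) == 6789
settings = ExampleSettings(EXAMPLE_PORT=8080)
assert settings.model_dump()["EXAMPLE_PORT"] == 8080
assert "EXAMPLE_VTAG" in settings.model_dump_json()

The module-level TypeAdapter instances in the diffs below (e.g. ANY_HTTP_URL_ADAPTER in ec2.py and s3.py) follow the same idea; keeping the adapter at module scope presumably avoids rebuilding the validator on every call.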
diff --git a/packages/settings-library/requirements/dev.txt b/packages/settings-library/requirements/dev.txt index 2168d7f36e1..de2adb4ecbb 100644 --- a/packages/settings-library/requirements/dev.txt +++ b/packages/settings-library/requirements/dev.txt @@ -1,4 +1,4 @@ -# Shortcut to install all packages needed to develop 'models-library' +# Shortcut to install all packages needed to develop 'settings-library' # # - As ci.txt but with current and repo packages in develop (edit) mode # diff --git a/packages/settings-library/src/settings_library/base.py b/packages/settings-library/src/settings_library/base.py index 296b453e26c..c3f0e103e7a 100644 --- a/packages/settings-library/src/settings_library/base.py +++ b/packages/settings-library/src/settings_library/base.py @@ -1,19 +1,12 @@ import logging -from collections.abc import Sequence from functools import cached_property -from typing import Final, get_args, get_origin - -from pydantic import ( - BaseConfig, - BaseSettings, - ConfigError, - Extra, - ValidationError, - validator, -) -from pydantic.error_wrappers import ErrorList, ErrorWrapper -from pydantic.fields import ModelField, Undefined -from pydantic.typing import is_literal_type +from typing import Any, Final, get_origin + +from common_library.pydantic_fields_extension import get_type, is_literal, is_nullable +from pydantic import ValidationInfo, field_validator +from pydantic.fields import FieldInfo +from pydantic_core import PydanticUndefined, ValidationError +from pydantic_settings import BaseSettings, SettingsConfigDict _logger = logging.getLogger(__name__) @@ -22,41 +15,33 @@ ] = "%s auto_default_from_env unresolved, defaulting to None" -class DefaultFromEnvFactoryError(ValidationError): - ... +class DefaultFromEnvFactoryError(ValueError): + def __init__(self, errors): + super().__init__(f"Default could not be created from env: {len(errors)} validation error(s)") + self.errors = errors -def create_settings_from_env(field: ModelField): +def _create_settings_from_env(field_name: str, info: FieldInfo): # NOTE: Cannot pass only field.type_ because @prepare_field (when this function is called) # this value is still not resolved (field.type_ at that moment has a weak_ref). # Therefore we keep the entire 'field' but MUST be treated here as read-only def _default_factory(): """Creates default from sub-settings or None (if nullable)""" - field_settings_cls = field.type_ + field_settings_cls = get_type(info) try: return field_settings_cls() except ValidationError as err: - if field.allow_none: + if is_nullable(info): # e.g. Optional[PostgresSettings] would warn if defaults to None _logger.warning( _DEFAULTS_TO_NONE_MSG, - field.name, + field_name, ) return None - - def _prepend_field_name(ee: ErrorList): - if isinstance(ee, ErrorWrapper): - return ErrorWrapper(ee.exc, (field.name, *ee.loc_tuple())) - assert isinstance(ee, Sequence) # nosec - return [_prepend_field_name(e) for e in ee] - - raise DefaultFromEnvFactoryError( - errors=_prepend_field_name(err.raw_errors), - model=err.model, - # FIXME: model = shall be the parent settings?? but I dont find how retrieve it from the field - ) from err + _logger.warning("Validation errors=%s", err.errors()) + raise DefaultFromEnvFactoryError(errors=err.errors()) from err return _default_factory @@ -70,40 +55,46 @@ class BaseCustomSettings(BaseSettings): SEE tests for details. 
""" - @validator("*", pre=True) + @field_validator("*", mode="before") @classmethod - def parse_none(cls, v, field: ModelField): + def _parse_none(cls, v, info: ValidationInfo): # WARNING: In nullable fields, envs equal to null or none are parsed as None !! - if field.allow_none and isinstance(v, str) and v.lower() in ("null", "none"): + if ( + info.field_name + and is_nullable(cls.model_fields[info.field_name]) + and isinstance(v, str) + and v.lower() in ("none",) + ): return None return v - class Config(BaseConfig): - case_sensitive = True # All must be capitalized - extra = Extra.forbid - allow_mutation = False - frozen = True - validate_all = True - keep_untouched = (cached_property,) - - @classmethod - def prepare_field(cls, field: ModelField) -> None: - super().prepare_field(field) + model_config = SettingsConfigDict( + case_sensitive=True, # All must be capitalized + extra="forbid", + frozen=True, + validate_default=True, + ignored_types=(cached_property,), + env_parse_none_str="null", + ) - auto_default_from_env = field.field_info.extra.get( - "auto_default_from_env", False + @classmethod + def __pydantic_init_subclass__(cls, **kwargs: Any): + super().__pydantic_init_subclass__(**kwargs) + + for name, field in cls.model_fields.items(): + auto_default_from_env = ( + field.json_schema_extra is not None + and field.json_schema_extra.get( # type: ignore[union-attr] + "auto_default_from_env", False + ) ) - - field_type = field.type_ - if args := get_args(field_type): - field_type = next(a for a in args if a != type(None)) + field_type = get_type(field) # Avoids issubclass raising TypeError. SEE test_issubclass_type_error_with_pydantic_models is_not_composed = ( get_origin(field_type) is None ) # is not composed as dict[str, Any] or Generic[Base] - # avoid literals raising TypeError - is_not_literal = is_literal_type(field.type_) is False + is_not_literal = not is_literal(field) if ( is_not_literal @@ -111,25 +102,26 @@ def prepare_field(cls, field: ModelField) -> None: and issubclass(field_type, BaseCustomSettings) ): if auto_default_from_env: - assert field.field_info.default is Undefined - assert field.field_info.default_factory is None + assert field.default is PydanticUndefined + assert field.default_factory is None # Transform it into something like `Field(default_factory=create_settings_from_env(field))` - field.default_factory = create_settings_from_env(field) + field.default_factory = _create_settings_from_env(name, field) field.default = None - field.required = False # has a default now elif ( is_not_literal and is_not_composed and issubclass(field_type, BaseSettings) ): - msg = f"{cls}.{field.name} of type {field_type} must inherit from BaseCustomSettings" - raise ConfigError(msg) + msg = f"{cls}.{name} of type {field_type} must inherit from BaseCustomSettings" + raise ValueError(msg) elif auto_default_from_env: - msg = f"auto_default_from_env=True can only be used in BaseCustomSettings subclassesbut field {cls}.{field.name} is {field_type} " - raise ConfigError(msg) + msg = f"auto_default_from_env=True can only be used in BaseCustomSettings subclasses but field {cls}.{name} is {field_type} " + raise ValueError(msg) + + cls.model_rebuild(force=True) @classmethod def create_from_envs(cls, **overrides): diff --git a/packages/settings-library/src/settings_library/basic_types.py b/packages/settings-library/src/settings_library/basic_types.py index 277832669e1..d912b5d4e39 100644 --- a/packages/settings-library/src/settings_library/basic_types.py +++ 
b/packages/settings-library/src/settings_library/basic_types.py @@ -3,21 +3,17 @@ # This is a minor evil to avoid the maintenance burden that creates # an extra dependency to a larger models_library (intra-repo library) -import re from enum import Enum +from typing import Annotated, TypeAlias -from pydantic import ConstrainedInt, ConstrainedStr - +from pydantic import Field, StringConstraints # port number range -class PortInt(ConstrainedInt): - gt = 0 - lt = 65535 +PortInt: TypeAlias = Annotated[int, Field(gt=0, lt=65535)] # e.g. 'v5' -class VersionTag(ConstrainedStr): - regex = re.compile(r"^v\d$") +VersionTag: TypeAlias = Annotated[str, StringConstraints(pattern=r"^v\d$")] class LogLevel(str, Enum): @@ -55,7 +51,6 @@ class BuildTargetEnum(str, Enum): # non-empty bounded string used as identifier # e.g. "123" or "name_123" or "fa327c73-52d8-462a-9267-84eeaf0f90e3" but NOT "" -class IDStr(ConstrainedStr): - strip_whitespace = True - min_length = 1 - max_length = 50 +IDStr: TypeAlias = Annotated[ + str, StringConstraints(strip_whitespace=True, min_length=1, max_length=50) +] diff --git a/packages/settings-library/src/settings_library/catalog.py b/packages/settings-library/src/settings_library/catalog.py index e5f44f29269..17c71237e81 100644 --- a/packages/settings-library/src/settings_library/catalog.py +++ b/packages/settings-library/src/settings_library/catalog.py @@ -1,6 +1,5 @@ from functools import cached_property -from pydantic import parse_obj_as from settings_library.base import BaseCustomSettings from settings_library.basic_types import PortInt, VersionTag from settings_library.utils_service import ( @@ -13,7 +12,7 @@ class CatalogSettings(BaseCustomSettings, MixinServiceSettings): CATALOG_HOST: str = "catalog" CATALOG_PORT: PortInt = DEFAULT_FASTAPI_PORT - CATALOG_VTAG: VersionTag = parse_obj_as(VersionTag, "v0") + CATALOG_VTAG: VersionTag = "v0" @cached_property def api_base_url(self) -> str: diff --git a/packages/settings-library/src/settings_library/comp_services.py b/packages/settings-library/src/settings_library/comp_services.py index e3cb628f7b7..71901e61624 100644 --- a/packages/settings-library/src/settings_library/comp_services.py +++ b/packages/settings-library/src/settings_library/comp_services.py @@ -1,5 +1,4 @@ -from pydantic import ByteSize, NonNegativeInt, validator -from pydantic.tools import parse_raw_as +from pydantic import ByteSize, NonNegativeInt, TypeAdapter, field_validator from settings_library.base import BaseCustomSettings from ._constants import GB @@ -10,19 +9,19 @@ class ComputationalServices(BaseCustomSettings): DEFAULT_MAX_NANO_CPUS: NonNegativeInt = _DEFAULT_MAX_NANO_CPUS_VALUE - DEFAULT_MAX_MEMORY: ByteSize = parse_raw_as( - ByteSize, f"{_DEFAULT_MAX_MEMORY_VALUE}" + DEFAULT_MAX_MEMORY: ByteSize = TypeAdapter(ByteSize).validate_python( + f"{_DEFAULT_MAX_MEMORY_VALUE}" ) DEFAULT_RUNTIME_TIMEOUT: NonNegativeInt = 0 - @validator("DEFAULT_MAX_NANO_CPUS", pre=True) + @field_validator("DEFAULT_MAX_NANO_CPUS", mode="before") @classmethod def _set_default_cpus_if_negative(cls, v): if v is None or v == "" or int(v) <= 0: v = _DEFAULT_MAX_NANO_CPUS_VALUE return v - @validator("DEFAULT_MAX_MEMORY", pre=True) + @field_validator("DEFAULT_MAX_MEMORY", mode="before") @classmethod def _set_default_memory_if_negative(cls, v): if v is None or v == "" or int(v) <= 0: diff --git a/packages/settings-library/src/settings_library/director_v2.py b/packages/settings-library/src/settings_library/director_v2.py index 78c5edd78c6..baf32956c8e 100644 --- 
a/packages/settings-library/src/settings_library/director_v2.py +++ b/packages/settings-library/src/settings_library/director_v2.py @@ -1,6 +1,5 @@ from functools import cached_property -from pydantic import parse_obj_as from settings_library.base import BaseCustomSettings from settings_library.basic_types import PortInt, VersionTag from settings_library.utils_service import ( @@ -13,7 +12,7 @@ class DirectorV2Settings(BaseCustomSettings, MixinServiceSettings): DIRECTOR_V2_HOST: str = "director-v2" DIRECTOR_V2_PORT: PortInt = DEFAULT_FASTAPI_PORT - DIRECTOR_V2_VTAG: VersionTag = parse_obj_as(VersionTag, "v2") + DIRECTOR_V2_VTAG: VersionTag = "v2" @cached_property def api_base_url(self) -> str: diff --git a/packages/settings-library/src/settings_library/docker_registry.py b/packages/settings-library/src/settings_library/docker_registry.py index bb365cb9785..e899ce45718 100644 --- a/packages/settings-library/src/settings_library/docker_registry.py +++ b/packages/settings-library/src/settings_library/docker_registry.py @@ -1,7 +1,8 @@ from functools import cached_property -from typing import Any, ClassVar +from typing import Any -from pydantic import Field, SecretStr, validator +from pydantic import Field, SecretStr, field_validator +from pydantic_settings import SettingsConfigDict from .base import BaseCustomSettings @@ -23,7 +24,7 @@ class RegistrySettings(BaseCustomSettings): ) REGISTRY_SSL: bool = Field(..., description="access to registry through ssl") - @validator("REGISTRY_PATH", pre=True) + @field_validator("REGISTRY_PATH", mode="before") @classmethod def _escape_none_string(cls, v) -> Any | None: return None if v == "None" else v @@ -36,8 +37,8 @@ def resolved_registry_url(self) -> str: def api_url(self) -> str: return f"{self.REGISTRY_URL}/v2" - class Config(BaseCustomSettings.Config): - schema_extra: ClassVar[dict[str, Any]] = { # type: ignore[misc] + model_config = SettingsConfigDict( + json_schema_extra={ "examples": [ { "REGISTRY_AUTH": "True", @@ -48,3 +49,4 @@ class Config(BaseCustomSettings.Config): } ], } + ) diff --git a/packages/settings-library/src/settings_library/ec2.py b/packages/settings-library/src/settings_library/ec2.py index 2cd7cf0b9a6..22d2d9af9ee 100644 --- a/packages/settings-library/src/settings_library/ec2.py +++ b/packages/settings-library/src/settings_library/ec2.py @@ -1,20 +1,23 @@ -from typing import Any, ClassVar +from typing import Annotated -from pydantic import Field +from pydantic import AnyHttpUrl, BeforeValidator, Field, TypeAdapter +from pydantic_settings import SettingsConfigDict from .base import BaseCustomSettings +ANY_HTTP_URL_ADAPTER: TypeAdapter = TypeAdapter(AnyHttpUrl) + class EC2Settings(BaseCustomSettings): EC2_ACCESS_KEY_ID: str - EC2_ENDPOINT: str | None = Field( - default=None, description="do not define if using standard AWS" - ) + EC2_ENDPOINT: Annotated[ + str, BeforeValidator(lambda x: str(ANY_HTTP_URL_ADAPTER.validate_python(x))) + ] | None = Field(default=None, description="do not define if using standard AWS") EC2_REGION_NAME: str = "us-east-1" EC2_SECRET_ACCESS_KEY: str - class Config(BaseCustomSettings.Config): - schema_extra: ClassVar[dict[str, Any]] = { # type: ignore[misc] + model_config = SettingsConfigDict( + json_schema_extra={ "examples": [ { "EC2_ACCESS_KEY_ID": "my_access_key_id", @@ -24,3 +27,4 @@ class Config(BaseCustomSettings.Config): } ], } + ) diff --git a/packages/settings-library/src/settings_library/efs.py b/packages/settings-library/src/settings_library/efs.py index d09b8abb20f..34c48f9dca6 100644 --- 
a/packages/settings-library/src/settings_library/efs.py +++ b/packages/settings-library/src/settings_library/efs.py @@ -8,7 +8,7 @@ class AwsEfsSettings(BaseCustomSettings): EFS_DNS_NAME: str = Field( description="AWS Elastic File System DNS name", - example="fs-xxx.efs.us-east-1.amazonaws.com", + examples=["fs-xxx.efs.us-east-1.amazonaws.com"], ) EFS_PROJECT_SPECIFIC_DATA_DIRECTORY: str EFS_MOUNTED_PATH: Path = Field( @@ -16,7 +16,7 @@ class AwsEfsSettings(BaseCustomSettings): ) EFS_ONLY_ENABLED_FOR_USERIDS: list[int] = Field( description="This is temporary solution so we can enable it for specific users for testing purpose", - example=[1], + examples=[[1]], ) diff --git a/packages/settings-library/src/settings_library/email.py b/packages/settings-library/src/settings_library/email.py index b15bf209405..fe5f8448b34 100644 --- a/packages/settings-library/src/settings_library/email.py +++ b/packages/settings-library/src/settings_library/email.py @@ -1,6 +1,7 @@ from enum import Enum +from typing import Self -from pydantic import root_validator +from pydantic import model_validator from pydantic.fields import Field from pydantic.types import SecretStr @@ -31,25 +32,23 @@ class SMTPSettings(BaseCustomSettings): SMTP_USERNAME: str | None = Field(None, min_length=1) SMTP_PASSWORD: SecretStr | None = Field(None, min_length=1) - @root_validator - @classmethod - def _both_credentials_must_be_set(cls, values): - username = values.get("SMTP_USERNAME") - password = values.get("SMTP_PASSWORD") + @model_validator(mode="after") + def _both_credentials_must_be_set(self) -> Self: + username = self.SMTP_USERNAME + password = self.SMTP_PASSWORD if username is None and password or username and password is None: msg = f"Please provide both {username=} and {password=} not just one" raise ValueError(msg) - return values + return self - @root_validator - @classmethod - def _enabled_tls_required_authentication(cls, values): - smtp_protocol = values.get("SMTP_PROTOCOL") + @model_validator(mode="after") + def _enabled_tls_required_authentication(self) -> Self: + smtp_protocol = self.SMTP_PROTOCOL - username = values.get("SMTP_USERNAME") - password = values.get("SMTP_PASSWORD") + username = self.SMTP_USERNAME + password = self.SMTP_PASSWORD tls_enabled = smtp_protocol == EmailProtocol.TLS starttls_enabled = smtp_protocol == EmailProtocol.STARTTLS @@ -57,7 +56,7 @@ def _enabled_tls_required_authentication(cls, values): if (tls_enabled or starttls_enabled) and not (username or password): msg = "when using SMTP_PROTOCOL other than UNENCRYPTED username and password are required" raise ValueError(msg) - return values + return self @property def has_credentials(self) -> bool: diff --git a/packages/settings-library/src/settings_library/node_ports.py b/packages/settings-library/src/settings_library/node_ports.py index 2a5d12f1bd7..522fcdd0991 100644 --- a/packages/settings-library/src/settings_library/node_ports.py +++ b/packages/settings-library/src/settings_library/node_ports.py @@ -1,7 +1,7 @@ from datetime import timedelta from typing import Final -from pydantic import Field, NonNegativeInt, PositiveInt, SecretStr, root_validator +from pydantic import Field, NonNegativeInt, PositiveInt, SecretStr, model_validator from .base import BaseCustomSettings from .postgres import PostgresSettings @@ -11,8 +11,8 @@ class StorageAuthSettings(StorageSettings): - STORAGE_USERNAME: str | None - STORAGE_PASSWORD: SecretStr | None + STORAGE_USERNAME: str | None = None + STORAGE_PASSWORD: SecretStr | None = None STORAGE_SECURE: bool = 
False @property @@ -21,11 +21,11 @@ def auth_required(self) -> bool: # for details see https://github.com/ITISFoundation/osparc-issues/issues/1264 return self.STORAGE_USERNAME is not None and self.STORAGE_PASSWORD is not None - @root_validator + @model_validator(mode="after") @classmethod def _validate_auth_fields(cls, values): - username = values["STORAGE_USERNAME"] - password = values["STORAGE_PASSWORD"] + username = values.STORAGE_USERNAME + password = values.STORAGE_PASSWORD if (username is None) != (password is None): msg = f"Both {username=} and {password=} must be either set or unset!" raise ValueError(msg) @@ -33,9 +33,13 @@ def _validate_auth_fields(cls, values): class NodePortsSettings(BaseCustomSettings): - NODE_PORTS_STORAGE_AUTH: StorageAuthSettings = Field(auto_default_from_env=True) + NODE_PORTS_STORAGE_AUTH: StorageAuthSettings = Field( + json_schema_extra={"auto_default_from_env": True} + ) - POSTGRES_SETTINGS: PostgresSettings = Field(auto_default_from_env=True) + POSTGRES_SETTINGS: PostgresSettings = Field( + json_schema_extra={"auto_default_from_env": True} + ) NODE_PORTS_MULTIPART_UPLOAD_COMPLETION_TIMEOUT_S: NonNegativeInt = int( timedelta(minutes=5).total_seconds() diff --git a/packages/settings-library/src/settings_library/postgres.py b/packages/settings-library/src/settings_library/postgres.py index f8335bbeed2..ed7377877be 100644 --- a/packages/settings-library/src/settings_library/postgres.py +++ b/packages/settings-library/src/settings_library/postgres.py @@ -1,8 +1,15 @@ import urllib.parse from functools import cached_property -from typing import Any, ClassVar -from pydantic import Field, PostgresDsn, SecretStr, validator +from pydantic import ( + AliasChoices, + Field, + PostgresDsn, + SecretStr, + ValidationInfo, + field_validator, +) +from pydantic_settings import SettingsConfigDict from .base import BaseCustomSettings from .basic_types import PortInt @@ -11,7 +18,7 @@ class PostgresSettings(BaseCustomSettings): # entrypoint POSTGRES_HOST: str - POSTGRES_PORT: PortInt = PortInt(5432) + POSTGRES_PORT: PortInt = 5432 # auth POSTGRES_USER: str @@ -31,43 +38,47 @@ class PostgresSettings(BaseCustomSettings): POSTGRES_CLIENT_NAME: str | None = Field( default=None, description="Name of the application connecting the postgres database, will default to use the host hostname (hostname on linux)", - env=[ + validation_alias=AliasChoices( "POSTGRES_CLIENT_NAME", # This is useful when running inside a docker container, then the hostname is set each client gets a different name "HOST", "HOSTNAME", - ], + ), ) - @validator("POSTGRES_MAXSIZE") + @field_validator("POSTGRES_MAXSIZE") @classmethod - def _check_size(cls, v, values): - if not (values["POSTGRES_MINSIZE"] <= v): - msg = f"assert POSTGRES_MINSIZE={values['POSTGRES_MINSIZE']} <= POSTGRES_MAXSIZE={v}" + def _check_size(cls, v, info: ValidationInfo): + if info.data["POSTGRES_MINSIZE"] > v: + msg = f"assert POSTGRES_MINSIZE={info.data['POSTGRES_MINSIZE']} <= POSTGRES_MAXSIZE={v}" raise ValueError(msg) return v @cached_property def dsn(self) -> str: - dsn: str = PostgresDsn.build( - scheme="postgresql", - user=self.POSTGRES_USER, - password=self.POSTGRES_PASSWORD.get_secret_value(), - host=self.POSTGRES_HOST, - port=f"{self.POSTGRES_PORT}", - path=f"/{self.POSTGRES_DB}", + dsn: str = str( + PostgresDsn.build( # pylint: disable=no-member + scheme="postgresql", + username=self.POSTGRES_USER, + password=self.POSTGRES_PASSWORD.get_secret_value(), + host=self.POSTGRES_HOST, + port=self.POSTGRES_PORT, + 
path=f"{self.POSTGRES_DB}", + ) ) return dsn @cached_property def dsn_with_async_sqlalchemy(self) -> str: - dsn: str = PostgresDsn.build( - scheme="postgresql+asyncpg", - user=self.POSTGRES_USER, - password=self.POSTGRES_PASSWORD.get_secret_value(), - host=self.POSTGRES_HOST, - port=f"{self.POSTGRES_PORT}", - path=f"/{self.POSTGRES_DB}", + dsn: str = str( + PostgresDsn.build( # pylint: disable=no-member + scheme="postgresql+asyncpg", + username=self.POSTGRES_USER, + password=self.POSTGRES_PASSWORD.get_secret_value(), + host=self.POSTGRES_HOST, + port=self.POSTGRES_PORT, + path=f"{self.POSTGRES_DB}", + ) ) return dsn @@ -81,8 +92,8 @@ def dsn_with_query(self) -> str: ) return dsn - class Config(BaseCustomSettings.Config): - schema_extra: ClassVar[dict[str, Any]] = { # type: ignore[misc] + model_config = SettingsConfigDict( + json_schema_extra={ "examples": [ # minimal required { @@ -94,3 +105,4 @@ class Config(BaseCustomSettings.Config): } ], } + ) diff --git a/packages/settings-library/src/settings_library/prometheus.py b/packages/settings-library/src/settings_library/prometheus.py index 065c7e930f0..9c40293d463 100644 --- a/packages/settings-library/src/settings_library/prometheus.py +++ b/packages/settings-library/src/settings_library/prometheus.py @@ -9,7 +9,7 @@ class PrometheusSettings(BaseCustomSettings, MixinServiceSettings): PROMETHEUS_URL: AnyUrl - PROMETHEUS_VTAG: VersionTag = VersionTag("v1") + PROMETHEUS_VTAG: VersionTag = "v1" PROMETHEUS_USERNAME: str | None = None PROMETHEUS_PASSWORD: SecretStr | None = None @@ -24,14 +24,16 @@ def origin(self) -> str: @cached_property def api_url(self) -> str: assert self.PROMETHEUS_URL.host # nosec - prometheus_url: str = AnyUrl.build( - scheme=self.PROMETHEUS_URL.scheme, - user=self.PROMETHEUS_USERNAME, - password=self.PROMETHEUS_PASSWORD.get_secret_value() - if self.PROMETHEUS_PASSWORD - else None, - host=self.PROMETHEUS_URL.host, - port=self.PROMETHEUS_URL.port, - path=self.PROMETHEUS_URL.path, + prometheus_url: str = str( + AnyUrl.build( + scheme=self.PROMETHEUS_URL.scheme, + username=self.PROMETHEUS_USERNAME, + password=self.PROMETHEUS_PASSWORD.get_secret_value() + if self.PROMETHEUS_PASSWORD + else None, + host=self.PROMETHEUS_URL.host, + port=self.PROMETHEUS_URL.port, + path=self.PROMETHEUS_URL.path, + ) ) return prometheus_url diff --git a/packages/settings-library/src/settings_library/r_clone.py b/packages/settings-library/src/settings_library/r_clone.py index ff04d509bef..c4288466928 100644 --- a/packages/settings-library/src/settings_library/r_clone.py +++ b/packages/settings-library/src/settings_library/r_clone.py @@ -13,7 +13,7 @@ class S3Provider(StrEnum): class RCloneSettings(BaseCustomSettings): - R_CLONE_S3: S3Settings = Field(auto_default_from_env=True) + R_CLONE_S3: S3Settings = Field(json_schema_extra={"auto_default_from_env": True}) R_CLONE_PROVIDER: S3Provider # SEE https://rclone.org/docs/#transfers-n diff --git a/packages/settings-library/src/settings_library/rabbit.py b/packages/settings-library/src/settings_library/rabbit.py index 19c6af0b656..e2cc2e271ce 100644 --- a/packages/settings-library/src/settings_library/rabbit.py +++ b/packages/settings-library/src/settings_library/rabbit.py @@ -1,6 +1,5 @@ from functools import cached_property -from pydantic import parse_obj_as from pydantic.networks import AnyUrl from pydantic.types import SecretStr @@ -15,7 +14,7 @@ class RabbitDsn(AnyUrl): class RabbitSettings(BaseCustomSettings): # host RABBIT_HOST: str - RABBIT_PORT: PortInt = parse_obj_as(PortInt, 5672) + 
RABBIT_PORT: PortInt = 5672 RABBIT_SECURE: bool # auth @@ -24,11 +23,13 @@ class RabbitSettings(BaseCustomSettings): @cached_property def dsn(self) -> str: - rabbit_dsn: str = RabbitDsn.build( - scheme="amqps" if self.RABBIT_SECURE else "amqp", - user=self.RABBIT_USER, - password=self.RABBIT_PASSWORD.get_secret_value(), - host=self.RABBIT_HOST, - port=f"{self.RABBIT_PORT}", + rabbit_dsn: str = str( + RabbitDsn.build( + scheme="amqps" if self.RABBIT_SECURE else "amqp", + username=self.RABBIT_USER, + password=self.RABBIT_PASSWORD.get_secret_value(), + host=self.RABBIT_HOST, + port=self.RABBIT_PORT, + ) ) return rabbit_dsn diff --git a/packages/settings-library/src/settings_library/redis.py b/packages/settings-library/src/settings_library/redis.py index ecccad69c10..6f9d7ad9d38 100644 --- a/packages/settings-library/src/settings_library/redis.py +++ b/packages/settings-library/src/settings_library/redis.py @@ -1,6 +1,6 @@ from enum import Enum -from pydantic import parse_obj_as +from pydantic import TypeAdapter from pydantic.networks import RedisDsn from pydantic.types import SecretStr @@ -22,20 +22,22 @@ class RedisDatabase(int, Enum): class RedisSettings(BaseCustomSettings): # host REDIS_HOST: str = "redis" - REDIS_PORT: PortInt = parse_obj_as(PortInt, 6789) + REDIS_PORT: PortInt = TypeAdapter(PortInt).validate_python(6789) # auth REDIS_USER: str | None = None REDIS_PASSWORD: SecretStr | None = None - def build_redis_dsn(self, db_index: RedisDatabase): - return RedisDsn.build( - scheme="redis", - user=self.REDIS_USER or None, - password=( - self.REDIS_PASSWORD.get_secret_value() if self.REDIS_PASSWORD else None - ), - host=self.REDIS_HOST, - port=f"{self.REDIS_PORT}", - path=f"/{db_index}", + def build_redis_dsn(self, db_index: RedisDatabase) -> str: + return str( + RedisDsn.build( # pylint: disable=no-member + scheme="redis", + username=self.REDIS_USER or None, + password=( + self.REDIS_PASSWORD.get_secret_value() if self.REDIS_PASSWORD else None + ), + host=self.REDIS_HOST, + port=self.REDIS_PORT, + path=f"/{db_index}", + ) ) diff --git a/packages/settings-library/src/settings_library/resource_usage_tracker.py b/packages/settings-library/src/settings_library/resource_usage_tracker.py index dc696fab76c..d0df8f093ad 100644 --- a/packages/settings-library/src/settings_library/resource_usage_tracker.py +++ b/packages/settings-library/src/settings_library/resource_usage_tracker.py @@ -1,7 +1,6 @@ from datetime import timedelta from functools import cached_property -from pydantic import parse_obj_as from settings_library.base import BaseCustomSettings from settings_library.basic_types import PortInt, VersionTag from settings_library.utils_service import ( @@ -16,7 +15,7 @@ class ResourceUsageTrackerSettings(BaseCustomSettings, MixinServiceSettings): RESOURCE_USAGE_TRACKER_HOST: str = "resource-usage-tracker" RESOURCE_USAGE_TRACKER_PORT: PortInt = DEFAULT_FASTAPI_PORT - RESOURCE_USAGE_TRACKER_VTAG: VersionTag = parse_obj_as(VersionTag, "v1") + RESOURCE_USAGE_TRACKER_VTAG: VersionTag = "v1" @cached_property def api_base_url(self) -> str: diff --git a/packages/settings-library/src/settings_library/s3.py b/packages/settings-library/src/settings_library/s3.py index cef1bf11be5..95268b41920 100644 --- a/packages/settings-library/src/settings_library/s3.py +++ b/packages/settings-library/src/settings_library/s3.py @@ -1,22 +1,25 @@ -from typing import Any, ClassVar +from typing import Annotated -from pydantic import AnyHttpUrl, Field +from pydantic import AnyHttpUrl, BeforeValidator, Field, 
TypeAdapter +from pydantic_settings import SettingsConfigDict from .base import BaseCustomSettings from .basic_types import IDStr +ANY_HTTP_URL_ADAPTER: TypeAdapter = TypeAdapter(AnyHttpUrl) + class S3Settings(BaseCustomSettings): S3_ACCESS_KEY: IDStr S3_BUCKET_NAME: IDStr - S3_ENDPOINT: AnyHttpUrl | None = Field( - default=None, description="do not define if using standard AWS" - ) + S3_ENDPOINT: Annotated[ + str, BeforeValidator(lambda x: str(ANY_HTTP_URL_ADAPTER.validate_python(x))) + ] | None = Field(default=None, description="do not define if using standard AWS") S3_REGION: IDStr S3_SECRET_KEY: IDStr - class Config(BaseCustomSettings.Config): - schema_extra: ClassVar[dict[str, Any]] = { # type: ignore[misc] + model_config = SettingsConfigDict( + json_schema_extra={ "examples": [ { # non AWS use-case @@ -35,3 +38,4 @@ class Config(BaseCustomSettings.Config): }, ], } + ) diff --git a/packages/settings-library/src/settings_library/ssm.py b/packages/settings-library/src/settings_library/ssm.py index 32b965fa123..73bee409464 100644 --- a/packages/settings-library/src/settings_library/ssm.py +++ b/packages/settings-library/src/settings_library/ssm.py @@ -1,20 +1,22 @@ -from typing import Any, ClassVar +from typing import Annotated -from pydantic import AnyHttpUrl, Field, SecretStr +from common_library.pydantic_type_adapters import AnyHttpUrlLegacyAdapter +from pydantic import BeforeValidator, Field, SecretStr +from pydantic_settings import SettingsConfigDict from .base import BaseCustomSettings class SSMSettings(BaseCustomSettings): SSM_ACCESS_KEY_ID: SecretStr - SSM_ENDPOINT: AnyHttpUrl | None = Field( - default=None, description="do not define if using standard AWS" - ) + SSM_ENDPOINT: Annotated[ + str, BeforeValidator(lambda x: str(AnyHttpUrlLegacyAdapter.validate_python(x))) + ] | None = Field(default=None, description="do not define if using standard AWS") SSM_REGION_NAME: str = "us-east-1" SSM_SECRET_ACCESS_KEY: SecretStr - class Config(BaseCustomSettings.Config): - schema_extra: ClassVar[dict[str, Any]] = { # type: ignore[misc] + model_config = SettingsConfigDict( + json_schema_extra={ "examples": [ { "SSM_ACCESS_KEY_ID": "my_access_key_id", @@ -24,3 +26,4 @@ class Config(BaseCustomSettings.Config): } ], } + ) diff --git a/packages/settings-library/src/settings_library/storage.py b/packages/settings-library/src/settings_library/storage.py index 92ec0301257..00ef1987037 100644 --- a/packages/settings-library/src/settings_library/storage.py +++ b/packages/settings-library/src/settings_library/storage.py @@ -1,6 +1,5 @@ from functools import cached_property -from pydantic import parse_obj_as from settings_library.base import BaseCustomSettings from settings_library.basic_types import PortInt, VersionTag from settings_library.utils_service import ( @@ -13,7 +12,7 @@ class StorageSettings(BaseCustomSettings, MixinServiceSettings): STORAGE_HOST: str = "storage" STORAGE_PORT: PortInt = DEFAULT_AIOHTTP_PORT - STORAGE_VTAG: VersionTag = parse_obj_as(VersionTag, "v0") + STORAGE_VTAG: VersionTag = "v0" @cached_property def base_url(self) -> str: diff --git a/packages/settings-library/src/settings_library/tracing.py b/packages/settings-library/src/settings_library/tracing.py index 28a11cbbf6a..85b7abae5ff 100644 --- a/packages/settings-library/src/settings_library/tracing.py +++ b/packages/settings-library/src/settings_library/tracing.py @@ -1,4 +1,4 @@ -from pydantic import AnyUrl, Field, parse_obj_as +from pydantic import AliasChoices, AnyUrl, Field, TypeAdapter from .base import 
BaseCustomSettings @@ -7,15 +7,15 @@ class TracingSettings(BaseCustomSettings): TRACING_ZIPKIN_ENDPOINT: AnyUrl = Field( - default=parse_obj_as(AnyUrl, "http://jaeger:9411"), + default=TypeAdapter(AnyUrl).validate_python("http://jaeger:9411"), # NOSONAR description="Zipkin compatible endpoint", ) TRACING_THRIFT_COMPACT_ENDPOINT: AnyUrl = Field( - default=parse_obj_as(AnyUrl, "http://jaeger:5775"), + default=TypeAdapter(AnyUrl).validate_python("http://jaeger:5775"), # NOSONAR description="accept zipkin.thrift over compact thrift protocol (deprecated, used by legacy clients only)", ) TRACING_CLIENT_NAME: str = Field( default=UNDEFINED_CLIENT_NAME, description="Name of the application connecting the tracing service", - env=["HOST", "HOSTNAME", "TRACING_CLIENT_NAME"], + validation_alias=AliasChoices("HOST", "HOSTNAME", "TRACING_CLIENT_NAME"), ) diff --git a/packages/settings-library/src/settings_library/twilio.py b/packages/settings-library/src/settings_library/twilio.py index eb4ec0c707a..b63e35caf61 100644 --- a/packages/settings-library/src/settings_library/twilio.py +++ b/packages/settings-library/src/settings_library/twilio.py @@ -6,29 +6,25 @@ """ -import re -from re import Pattern +from typing import Annotated, TypeAlias -from pydantic import ConstrainedStr, Field, parse_obj_as +from pydantic import BeforeValidator, Field, StringConstraints, TypeAdapter from .base import BaseCustomSettings - -class CountryCodeStr(ConstrainedStr): - # Based on https://countrycode.org/ - strip_whitespace: bool = True - regex: Pattern[str] | None = re.compile(r"^\d{1,4}") - - class Config: - frozen = True +# Based on https://countrycode.org/ +CountryCodeStr: TypeAlias = Annotated[ + str, + BeforeValidator(str), + StringConstraints(strip_whitespace=True, pattern=r"^\d{1,4}"), +] class TwilioSettings(BaseCustomSettings): TWILIO_ACCOUNT_SID: str = Field(..., description="Twilio account String Identifier") TWILIO_AUTH_TOKEN: str = Field(..., description="API tokens") TWILIO_COUNTRY_CODES_W_ALPHANUMERIC_SID_SUPPORT: list[CountryCodeStr] = Field( - default=parse_obj_as( - list[CountryCodeStr], + default=TypeAdapter(list[CountryCodeStr]).validate_python( [ "41", ], diff --git a/packages/settings-library/src/settings_library/utils_cli.py b/packages/settings-library/src/settings_library/utils_cli.py index 79d0e1ac145..2c1ab37116d 100644 --- a/packages/settings-library/src/settings_library/utils_cli.py +++ b/packages/settings-library/src/settings_library/utils_cli.py @@ -1,3 +1,4 @@ +import json import logging import os from collections.abc import Callable @@ -6,12 +7,12 @@ import rich import typer +from common_library.serialization import model_dump_with_secrets from pydantic import ValidationError -from pydantic.env_settings import BaseSettings +from pydantic_settings import BaseSettings from ._constants import HEADER_STR from .base import BaseCustomSettings -from .utils_encoders import create_json_encoder_wo_secrets def print_as_envfile( @@ -24,14 +25,15 @@ def print_as_envfile( ): exclude_unset = pydantic_export_options.get("exclude_unset", False) - for field in settings_obj.__fields__.values(): - auto_default_from_env = field.field_info.extra.get( - "auto_default_from_env", False + for name, field in settings_obj.model_fields.items(): + auto_default_from_env = ( + field.json_schema_extra is not None + and field.json_schema_extra.get("auto_default_from_env", False) ) - value = getattr(settings_obj, field.name) + value = getattr(settings_obj, name) - if exclude_unset and field.name not in 
settings_obj.__fields_set__: + if exclude_unset and name not in settings_obj.model_fields_set: if not auto_default_from_env: continue if value is None: @@ -39,10 +41,14 @@ def print_as_envfile( if isinstance(value, BaseSettings): if compact: - value = f"'{value.json(**pydantic_export_options)}'" # flat + value = json.dumps( + model_dump_with_secrets( + value, show_secrets=show_secrets, **pydantic_export_options + ) + ) # flat else: if verbose: - typer.echo(f"\n# --- {field.name} --- ") + typer.echo(f"\n# --- {name} --- ") print_as_envfile( value, compact=False, @@ -54,17 +60,22 @@ def print_as_envfile( elif show_secrets and hasattr(value, "get_secret_value"): value = value.get_secret_value() - if verbose: - field_info = field.field_info - if field_info.description: - typer.echo(f"# {field_info.description}") + if verbose and field.description: + typer.echo(f"# {field.description}") - typer.echo(f"{field.name}={value}") + typer.echo(f"{name}={value}") -def print_as_json(settings_obj, *, compact=False, **pydantic_export_options): +def print_as_json( + settings_obj, *, compact=False, show_secrets, **pydantic_export_options +): typer.echo( - settings_obj.json(indent=None if compact else 2, **pydantic_export_options) + json.dumps( + model_dump_with_secrets( + settings_obj, show_secrets=show_secrets, **pydantic_export_options + ), + indent=None if compact else 2, + ) ) @@ -128,14 +139,14 @@ def settings( raise pydantic_export_options: dict[str, Any] = {"exclude_unset": exclude_unset} - if show_secrets: - # NOTE: this option is for json-only - pydantic_export_options["encoder"] = create_json_encoder_wo_secrets( - settings_cls - ) if as_json: - print_as_json(settings_obj, compact=compact, **pydantic_export_options) + print_as_json( + settings_obj, + compact=compact, + show_secrets=show_secrets, + **pydantic_export_options, + ) else: print_as_envfile( settings_obj, diff --git a/packages/settings-library/src/settings_library/utils_encoders.py b/packages/settings-library/src/settings_library/utils_encoders.py index 71ea960bf78..f38e156b6a5 100644 --- a/packages/settings-library/src/settings_library/utils_encoders.py +++ b/packages/settings-library/src/settings_library/utils_encoders.py @@ -12,7 +12,7 @@ def create_json_encoder_wo_secrets(model_cls: type[BaseModel]): show_secrets_encoder = create_json_encoder_wo_secrets(type(model)) model.dict(encoder=show_secrets_encoder)['my_secret'] == "secret" """ - current_encoders = getattr(model_cls.Config, "json_encoders", {}) + current_encoders = getattr(model_cls.model_config, "json_encoders", {}) return partial( custom_pydantic_encoder, { diff --git a/packages/settings-library/src/settings_library/utils_service.py b/packages/settings-library/src/settings_library/utils_service.py index e7bb66057c5..17746487a6f 100644 --- a/packages/settings-library/src/settings_library/utils_service.py +++ b/packages/settings-library/src/settings_library/utils_service.py @@ -4,14 +4,13 @@ """ from enum import Enum, auto -from pydantic import parse_obj_as from pydantic.networks import AnyUrl from pydantic.types import SecretStr from .basic_types import PortInt -DEFAULT_AIOHTTP_PORT: PortInt = parse_obj_as(PortInt, 8080) -DEFAULT_FASTAPI_PORT: PortInt = parse_obj_as(PortInt, 8000) +DEFAULT_AIOHTTP_PORT: PortInt = 8080 +DEFAULT_FASTAPI_PORT: PortInt = 8000 class URLPart(Enum): @@ -96,6 +95,8 @@ def _compose_url( assert prefix # nosec prefix = prefix.upper() + port_value = self._safe_getattr(f"{prefix}_PORT", port) + parts = { "scheme": ( "https" @@ -103,30 +104,32 @@ def 
_compose_url( else "http" ), "host": self._safe_getattr(f"{prefix}_HOST", URLPart.REQUIRED), - "user": self._safe_getattr(f"{prefix}_USER", user), + "port": int(port_value) if port_value is not None else None, + "username": self._safe_getattr(f"{prefix}_USER", user), "password": self._safe_getattr(f"{prefix}_PASSWORD", password), - "port": self._safe_getattr(f"{prefix}_PORT", port), } if vtag != URLPart.EXCLUDE: # noqa: SIM102 if v := self._safe_getattr(f"{prefix}_VTAG", vtag): - parts["path"] = f"/{v}" + parts["path"] = f"{v}" # post process parts dict kwargs = {} - for k, v in parts.items(): - value = v + for k, v in parts.items(): # type: ignore[assignment] if isinstance(v, SecretStr): value = v.get_secret_value() - elif v is not None: - value = f"{v}" + else: + value = v - kwargs[k] = value + if value is not None: + kwargs[k] = value - assert all(isinstance(v, str) or v is None for v in kwargs.values()) # nosec + assert all( + isinstance(v, (str, int)) or v is None for v in kwargs.values() + ) # nosec - composed_url: str = AnyUrl.build(**kwargs) - return composed_url + composed_url: str = str(AnyUrl.build(**kwargs)) # type: ignore[arg-type] + return composed_url.rstrip("/") def _build_api_base_url(self, *, prefix: str) -> str: return self._compose_url( diff --git a/packages/settings-library/src/settings_library/webserver.py b/packages/settings-library/src/settings_library/webserver.py index 4da2c41d699..c32bdbeb0c5 100644 --- a/packages/settings-library/src/settings_library/webserver.py +++ b/packages/settings-library/src/settings_library/webserver.py @@ -1,7 +1,5 @@ from functools import cached_property -from pydantic import parse_obj_as - from .base import BaseCustomSettings from .basic_types import PortInt, VersionTag from .utils_service import DEFAULT_AIOHTTP_PORT, MixinServiceSettings, URLPart @@ -10,7 +8,7 @@ class WebServerSettings(BaseCustomSettings, MixinServiceSettings): WEBSERVER_HOST: str = "webserver" WEBSERVER_PORT: PortInt = DEFAULT_AIOHTTP_PORT - WEBSERVER_VTAG: VersionTag = parse_obj_as(VersionTag, "v0") + WEBSERVER_VTAG: VersionTag = "v0" @cached_property def base_url(self) -> str: diff --git a/packages/settings-library/tests/conftest.py b/packages/settings-library/tests/conftest.py index 725f19c534a..0431a6c6748 100644 --- a/packages/settings-library/tests/conftest.py +++ b/packages/settings-library/tests/conftest.py @@ -96,9 +96,13 @@ class _ApplicationSettings(BaseCustomSettings): # NOTE: by convention, an addon is disabled when APP_ADDON=None, so we make this # entry nullable as well - APP_OPTIONAL_ADDON: _ModuleSettings | None = Field(auto_default_from_env=True) + APP_OPTIONAL_ADDON: _ModuleSettings | None = Field( + json_schema_extra={"auto_default_from_env": True} + ) # NOTE: example of a group that cannot be disabled (not nullable) - APP_REQUIRED_PLUGIN: PostgresSettings | None = Field(auto_default_from_env=True) + APP_REQUIRED_PLUGIN: PostgresSettings | None = Field( + json_schema_extra={"auto_default_from_env": True} + ) return _ApplicationSettings diff --git a/packages/settings-library/tests/test__models_examples.py b/packages/settings-library/tests/test__models_examples.py index c60a6c08261..96ffc7135b2 100644 --- a/packages/settings-library/tests/test__models_examples.py +++ b/packages/settings-library/tests/test__models_examples.py @@ -14,6 +14,6 @@ def test_all_settings_library_models_config_examples( model_cls: type[BaseModel], example_name: int, example_data: Any ): - assert model_cls.parse_obj( + assert model_cls.model_validate( example_data ), 
f"Failed {example_name} : {json.dumps(example_data)}" diff --git a/packages/settings-library/tests/test__pydantic_settings.py b/packages/settings-library/tests/test__pydantic_settings.py index 8cf3eadc30f..bdc536387fc 100644 --- a/packages/settings-library/tests/test__pydantic_settings.py +++ b/packages/settings-library/tests/test__pydantic_settings.py @@ -12,45 +12,50 @@ """ - -from pydantic import BaseSettings, validator -from pydantic.fields import ModelField, Undefined +from common_library.pydantic_fields_extension import is_nullable +from pydantic import ValidationInfo, field_validator +from pydantic.fields import PydanticUndefined +from pydantic_settings import BaseSettings def assert_field_specs( - model_cls, name, is_required, is_nullable, explicit_default, defaults + model_cls: type[BaseSettings], + name: str, + required: bool, + nullable: bool, + explicit_default, ): - field: ModelField = model_cls.__fields__[name] - print(field, field.field_info) + info = model_cls.model_fields[name] + print(info) - assert field.required == is_required - assert field.allow_none == is_nullable - assert field.field_info.default == explicit_default + assert info.is_required() == required + assert is_nullable(info) == nullable - assert field.default == defaults - if field.required: + if info.is_required(): # in this case, default is not really used - assert field.default is None + assert info.default is PydanticUndefined + else: + assert info.default == explicit_default class Settings(BaseSettings): VALUE: int VALUE_DEFAULT: int = 42 - VALUE_NULLABLE_REQUIRED: int | None = ... # type: ignore - VALUE_NULLABLE_OPTIONAL: int | None + VALUE_NULLABLE_REQUIRED: int | None = ... # type: ignore[assignment] + VALUE_NULLABLE_REQUIRED_AS_WELL: int | None VALUE_NULLABLE_DEFAULT_VALUE: int | None = 42 VALUE_NULLABLE_DEFAULT_NULL: int | None = None # Other ways to write down "required" is using ... - VALUE_ALSO_REQUIRED: int = ... # type: ignore + VALUE_REQUIRED_AS_WELL: int = ... # type: ignore[assignment] - @validator("*", pre=True) + @field_validator("*", mode="before") @classmethod - def parse_none(cls, v, values, field: ModelField): + def parse_none(cls, v, info: ValidationInfo): # WARNING: In nullable fields, envs equal to null or none are parsed as None !! 
- if field.allow_none: + if info.field_name and is_nullable(cls.model_fields[info.field_name]): if isinstance(v, str) and v.lower() in ("null", "none"): return None return v @@ -64,37 +69,33 @@ def test_fields_declarations(): assert_field_specs( Settings, "VALUE", - is_required=True, - is_nullable=False, - explicit_default=Undefined, - defaults=None, + required=True, + nullable=False, + explicit_default=PydanticUndefined, ) assert_field_specs( Settings, "VALUE_DEFAULT", - is_required=False, - is_nullable=False, + required=False, + nullable=False, explicit_default=42, - defaults=42, ) assert_field_specs( Settings, "VALUE_NULLABLE_REQUIRED", - is_required=True, - is_nullable=True, + required=True, + nullable=True, explicit_default=Ellipsis, - defaults=None, ) assert_field_specs( Settings, - "VALUE_NULLABLE_OPTIONAL", - is_required=False, - is_nullable=True, - explicit_default=Undefined, # <- difference wrt VALUE_NULLABLE_DEFAULT_NULL - defaults=None, + "VALUE_NULLABLE_REQUIRED_AS_WELL", + required=True, + nullable=True, + explicit_default=PydanticUndefined, # <- difference wrt VALUE_NULLABLE_DEFAULT_NULL ) # VALUE_NULLABLE_OPTIONAL interpretation has always been confusing @@ -104,54 +105,53 @@ def test_fields_declarations(): assert_field_specs( Settings, "VALUE_NULLABLE_DEFAULT_VALUE", - is_required=False, - is_nullable=True, + required=False, + nullable=True, explicit_default=42, - defaults=42, ) assert_field_specs( Settings, "VALUE_NULLABLE_DEFAULT_NULL", - is_required=False, - is_nullable=True, - explicit_default=None, # <- difference wrt VALUE_NULLABLE_OPTIONAL - defaults=None, + required=False, + nullable=True, + explicit_default=None, ) assert_field_specs( Settings, - "VALUE_ALSO_REQUIRED", - is_required=True, - is_nullable=False, + "VALUE_REQUIRED_AS_WELL", + required=True, + nullable=False, explicit_default=Ellipsis, - defaults=None, ) def test_construct(monkeypatch): # from __init__ settings_from_init = Settings( - VALUE=1, VALUE_ALSO_REQUIRED=10, VALUE_NULLABLE_REQUIRED=None + VALUE=1, + VALUE_NULLABLE_REQUIRED=None, + VALUE_NULLABLE_REQUIRED_AS_WELL=None, + VALUE_REQUIRED_AS_WELL=10, ) - print(settings_from_init.json(exclude_unset=True, indent=1)) + print(settings_from_init.model_dump_json(exclude_unset=True, indent=1)) # from env vars monkeypatch.setenv("VALUE", "1") - monkeypatch.setenv("VALUE_ALSO_REQUIRED", "10") - monkeypatch.setenv( - "VALUE_NULLABLE_REQUIRED", "null" - ) # WARNING: set this env to None would not work w/o ``parse_none`` validator! bug??? 
+ monkeypatch.setenv("VALUE_REQUIRED_AS_WELL", "10") + monkeypatch.setenv("VALUE_NULLABLE_REQUIRED", "null") + monkeypatch.setenv("VALUE_NULLABLE_REQUIRED_AS_WELL", None) - settings_from_env = Settings() - print(settings_from_env.json(exclude_unset=True, indent=1)) + settings_from_env = Settings() # type: ignore[call-arg] + print(settings_from_env.model_dump_json(exclude_unset=True, indent=1)) assert settings_from_init == settings_from_env # mixed - settings_from_both = Settings(VALUE_NULLABLE_REQUIRED=3) - print(settings_from_both.json(exclude_unset=True, indent=1)) + settings_from_both = Settings(VALUE_NULLABLE_REQUIRED=3) # type: ignore[call-arg] + print(settings_from_both.model_dump_json(exclude_unset=True, indent=1)) - assert settings_from_both == settings_from_init.copy( + assert settings_from_both == settings_from_init.model_copy( update={"VALUE_NULLABLE_REQUIRED": 3} ) diff --git a/packages/settings-library/tests/test_base.py b/packages/settings-library/tests/test_base.py index 7cbd9fa8773..3344aa6b35a 100644 --- a/packages/settings-library/tests/test_base.py +++ b/packages/settings-library/tests/test_base.py @@ -10,8 +10,9 @@ import pytest import settings_library.base -from pydantic import BaseModel, BaseSettings, ValidationError +from pydantic import BaseModel, ValidationError from pydantic.fields import Field +from pydantic_settings import BaseSettings from pytest_mock import MockerFixture from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_envfile from settings_library.base import ( @@ -38,17 +39,17 @@ def _get_attrs_tree(obj: Any) -> dict[str, Any]: def _print_defaults(model_cls: type[BaseModel]): - for field in model_cls.__fields__.values(): - print(field.name, ":", end="") + for name, field in model_cls.model_fields.items(): + print(name, ":", end="") try: - default = field.get_default() + default = field.get_default(call_default_factory=True) # new in Pydatic v2 print(default, type(default)) except ValidationError as err: print(err) def _dumps_model_class(model_cls: type[BaseModel]): - d = {field.name: _get_attrs_tree(field) for field in model_cls.__fields__.values()} + d = {name: _get_attrs_tree(field) for name, field in model_cls.model_fields.items()} return json.dumps(d, indent=1) @@ -61,16 +62,19 @@ class S(BaseCustomSettings): class M1(BaseCustomSettings): VALUE: S VALUE_DEFAULT: S = S(S_VALUE=42) - VALUE_CONFUSING: S = None # type: ignore + # VALUE_CONFUSING: S = None # type: ignore VALUE_NULLABLE_REQUIRED: S | None = ... 
# type: ignore - VALUE_NULLABLE_OPTIONAL: S | None VALUE_NULLABLE_DEFAULT_VALUE: S | None = S(S_VALUE=42) VALUE_NULLABLE_DEFAULT_NULL: S | None = None - VALUE_NULLABLE_DEFAULT_ENV: S | None = Field(auto_default_from_env=True) - VALUE_DEFAULT_ENV: S = Field(auto_default_from_env=True) + VALUE_NULLABLE_DEFAULT_ENV: S | None = Field( + json_schema_extra={"auto_default_from_env": True} + ) + VALUE_DEFAULT_ENV: S = Field( + json_schema_extra={"auto_default_from_env": True} + ) class M2(BaseCustomSettings): # @@ -82,10 +86,14 @@ class M2(BaseCustomSettings): VALUE_NULLABLE_DEFAULT_NULL: S | None = None # defaults enabled but if not exists, it disables - VALUE_NULLABLE_DEFAULT_ENV: S | None = Field(auto_default_from_env=True) + VALUE_NULLABLE_DEFAULT_ENV: S | None = Field( + json_schema_extra={"auto_default_from_env": True} + ) # cannot be disabled - VALUE_DEFAULT_ENV: S = Field(auto_default_from_env=True) + VALUE_DEFAULT_ENV: S = Field( + json_schema_extra={"auto_default_from_env": True} + ) # Changed in version 3.7: Dictionary order is guaranteed to be insertion order _classes = {"M1": M1, "M2": M2, "S": S} @@ -101,14 +109,14 @@ def test_create_settings_class( # DEV: Path("M1.ignore.json").write_text(dumps_model_class(M)) - assert M.__fields__["VALUE_NULLABLE_DEFAULT_ENV"].default_factory + assert M.model_fields["VALUE_NULLABLE_DEFAULT_ENV"].default_factory - assert M.__fields__["VALUE_NULLABLE_DEFAULT_ENV"].get_default() is None + assert M.model_fields["VALUE_NULLABLE_DEFAULT_ENV"].get_default() is None - assert M.__fields__["VALUE_DEFAULT_ENV"].default_factory + assert M.model_fields["VALUE_DEFAULT_ENV"].default_factory with pytest.raises(DefaultFromEnvFactoryError): - M.__fields__["VALUE_DEFAULT_ENV"].get_default() + M.model_fields["VALUE_DEFAULT_ENV"].get_default(call_default_factory=True) def test_create_settings_class_with_environment( @@ -136,20 +144,19 @@ def test_create_settings_class_with_environment( instance = SettingsClass() - print(instance.json(indent=2)) + print(instance.model_dump_json(indent=2)) # checks - assert instance.dict(exclude_unset=True) == { + assert instance.model_dump(exclude_unset=True) == { "VALUE": {"S_VALUE": 2}, "VALUE_NULLABLE_REQUIRED": {"S_VALUE": 3}, } - assert instance.dict() == { + assert instance.model_dump() == { "VALUE": {"S_VALUE": 2}, "VALUE_DEFAULT": {"S_VALUE": 42}, - "VALUE_CONFUSING": None, + # "VALUE_CONFUSING": None, "VALUE_NULLABLE_REQUIRED": {"S_VALUE": 3}, - "VALUE_NULLABLE_OPTIONAL": None, "VALUE_NULLABLE_DEFAULT_VALUE": {"S_VALUE": 42}, "VALUE_NULLABLE_DEFAULT_NULL": None, "VALUE_NULLABLE_DEFAULT_ENV": {"S_VALUE": 1}, @@ -163,13 +170,15 @@ def test_create_settings_class_without_environ_fails( # now defining S_VALUE M2_outside_context = create_settings_class("M2") - with pytest.raises(ValidationError) as err_info: + with pytest.raises(DefaultFromEnvFactoryError) as err_info: M2_outside_context.create_from_envs() - assert err_info.value.errors()[0] == { - "loc": ("VALUE_DEFAULT_ENV", "S_VALUE"), - "msg": "field required", - "type": "value_error.missing", + assert err_info.value.errors[0] == { + "input": {}, + "loc": ("S_VALUE",), + "msg": "Field required", + "type": "missing", + "url": "https://errors.pydantic.dev/2.9/v/missing", } @@ -202,7 +211,9 @@ def test_auto_default_to_none_logs_a_warning( class SettingsClass(BaseCustomSettings): VALUE_NULLABLE_DEFAULT_NULL: S | None = None - VALUE_NULLABLE_DEFAULT_ENV: S | None = Field(auto_default_from_env=True) + VALUE_NULLABLE_DEFAULT_ENV: S | None = Field( + 
json_schema_extra={"auto_default_from_env": True}, + ) instance = SettingsClass.create_from_envs() assert instance.VALUE_NULLABLE_DEFAULT_NULL is None @@ -224,7 +235,9 @@ def test_auto_default_to_not_none( class SettingsClass(BaseCustomSettings): VALUE_NULLABLE_DEFAULT_NULL: S | None = None - VALUE_NULLABLE_DEFAULT_ENV: S | None = Field(auto_default_from_env=True) + VALUE_NULLABLE_DEFAULT_ENV: S | None = Field( + json_schema_extra={"auto_default_from_env": True}, + ) instance = SettingsClass.create_from_envs() assert instance.VALUE_NULLABLE_DEFAULT_NULL is None @@ -286,9 +299,11 @@ class SettingsClassExt(SettingsClass): error = err_info.value.errors()[0] assert error == { + "input": "", "loc": ("INT_VALUE_TO_NOTHING",), - "msg": "value is not a valid integer", - "type": "type_error.integer", + "msg": "Input should be a valid integer, unable to parse string as an integer", + "type": "int_parsing", + "url": "https://errors.pydantic.dev/2.9/v/int_parsing", } diff --git a/packages/settings-library/tests/test_base_w_postgres.py b/packages/settings-library/tests/test_base_w_postgres.py index d54d40bf925..85fd98c7522 100644 --- a/packages/settings-library/tests/test_base_w_postgres.py +++ b/packages/settings-library/tests/test_base_w_postgres.py @@ -3,10 +3,11 @@ # pylint: disable=unused-variable +import os from collections.abc import Callable import pytest -from pydantic import Field, ValidationError +from pydantic import AliasChoices, Field, ValidationError from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_envfile from settings_library.base import BaseCustomSettings, DefaultFromEnvFactoryError from settings_library.basic_types import PortInt @@ -22,6 +23,13 @@ # +@pytest.fixture +def postgres_envvars_unset(monkeypatch: pytest.MonkeyPatch) -> None: + for name in os.environ: + if name.startswith("POSTGRES_"): + monkeypatch.delenv(name) + + @pytest.fixture def model_classes_factory() -> Callable: # @@ -49,7 +57,9 @@ class _FakePostgresSettings(BaseCustomSettings): POSTGRES_CLIENT_NAME: str | None = Field( None, - env=["HOST", "HOSTNAME", "POSTGRES_CLIENT_NAME"], + validation_alias=AliasChoices( + "HOST", "HOSTNAME", "POSTGRES_CLIENT_NAME" + ), ) # @@ -60,18 +70,18 @@ class S1(BaseCustomSettings): WEBSERVER_POSTGRES: _FakePostgresSettings class S2(BaseCustomSettings): - WEBSERVER_POSTGRES_NULLABLE_OPTIONAL: _FakePostgresSettings | None + WEBSERVER_POSTGRES_NULLABLE_OPTIONAL: _FakePostgresSettings | None = None class S3(BaseCustomSettings): # cannot be disabled!! 
WEBSERVER_POSTGRES_DEFAULT_ENV: _FakePostgresSettings = Field( - auto_default_from_env=True + json_schema_extra={"auto_default_from_env": True} ) class S4(BaseCustomSettings): # defaults enabled but if cannot be resolved, it disables WEBSERVER_POSTGRES_NULLABLE_DEFAULT_ENV: _FakePostgresSettings | None = ( - Field(auto_default_from_env=True) + Field(json_schema_extra={"auto_default_from_env": True}) ) class S5(BaseCustomSettings): @@ -104,7 +114,9 @@ class S5(BaseCustomSettings): # -def test_parse_from_empty_envs(model_classes_factory: Callable): +def test_parse_from_empty_envs( + postgres_envvars_unset: None, model_classes_factory: Callable +): S1, S2, S3, S4, S5 = model_classes_factory() @@ -115,7 +127,7 @@ def test_parse_from_empty_envs(model_classes_factory: Callable): assert s2.WEBSERVER_POSTGRES_NULLABLE_OPTIONAL is None with pytest.raises(DefaultFromEnvFactoryError): - # NOTE: cannot hae a default or assignment + # NOTE: cannot have a default or assignment S3() # auto default factory resolves to None (because is nullable) @@ -126,7 +138,11 @@ def test_parse_from_empty_envs(model_classes_factory: Callable): assert s5.WEBSERVER_POSTGRES_NULLABLE_DEFAULT_NULL is None -def test_parse_from_individual_envs(monkeypatch, model_classes_factory): +def test_parse_from_individual_envs( + postgres_envvars_unset: None, + monkeypatch: pytest.MonkeyPatch, + model_classes_factory: Callable, +): S1, S2, S3, S4, S5 = model_classes_factory() @@ -146,18 +162,20 @@ def test_parse_from_individual_envs(monkeypatch, model_classes_factory): S1() assert exc_info.value.errors()[0] == { + "input": {}, "loc": ("WEBSERVER_POSTGRES",), - "msg": "field required", - "type": "value_error.missing", + "msg": "Field required", + "type": "missing", + "url": "https://errors.pydantic.dev/2.9/v/missing", } s2 = S2() - assert s2.dict(exclude_unset=True) == {} - assert s2.dict() == {"WEBSERVER_POSTGRES_NULLABLE_OPTIONAL": None} + assert s2.model_dump(exclude_unset=True) == {} + assert s2.model_dump() == {"WEBSERVER_POSTGRES_NULLABLE_OPTIONAL": None} s3 = S3() - assert s3.dict(exclude_unset=True) == {} - assert s3.dict() == { + assert s3.model_dump(exclude_unset=True) == {} + assert s3.model_dump() == { "WEBSERVER_POSTGRES_DEFAULT_ENV": { "POSTGRES_HOST": "pg", "POSTGRES_USER": "test", @@ -171,8 +189,8 @@ def test_parse_from_individual_envs(monkeypatch, model_classes_factory): } s4 = S4() - assert s4.dict(exclude_unset=True) == {} - assert s4.dict() == { + assert s4.model_dump(exclude_unset=True) == {} + assert s4.model_dump() == { "WEBSERVER_POSTGRES_NULLABLE_DEFAULT_ENV": { "POSTGRES_HOST": "pg", "POSTGRES_USER": "test", @@ -186,11 +204,13 @@ def test_parse_from_individual_envs(monkeypatch, model_classes_factory): } s5 = S5() - assert s5.dict(exclude_unset=True) == {} - assert s5.dict() == {"WEBSERVER_POSTGRES_NULLABLE_DEFAULT_NULL": None} + assert s5.model_dump(exclude_unset=True) == {} + assert s5.model_dump() == {"WEBSERVER_POSTGRES_NULLABLE_DEFAULT_NULL": None} -def test_parse_compact_env(monkeypatch, model_classes_factory): +def test_parse_compact_env( + postgres_envvars_unset: None, monkeypatch, model_classes_factory +): S1, S2, S3, S4, S5 = model_classes_factory() @@ -209,7 +229,7 @@ def test_parse_compact_env(monkeypatch, model_classes_factory): # test s1 = S1() - assert s1.dict(exclude_unset=True) == { + assert s1.model_dump(exclude_unset=True) == { "WEBSERVER_POSTGRES": { "POSTGRES_HOST": "pg2", "POSTGRES_USER": "test2", @@ -217,7 +237,7 @@ def test_parse_compact_env(monkeypatch, model_classes_factory): 
"POSTGRES_DB": "db2", } } - assert s1.dict() == { + assert s1.model_dump() == { "WEBSERVER_POSTGRES": { "POSTGRES_HOST": "pg2", "POSTGRES_USER": "test2", @@ -238,7 +258,7 @@ def test_parse_compact_env(monkeypatch, model_classes_factory): """, ) s2 = S2() - assert s2.dict(exclude_unset=True) == { + assert s2.model_dump(exclude_unset=True) == { "WEBSERVER_POSTGRES_NULLABLE_OPTIONAL": { "POSTGRES_HOST": "pg2", "POSTGRES_USER": "test2", @@ -258,7 +278,7 @@ def test_parse_compact_env(monkeypatch, model_classes_factory): # default until it is really needed. Here before it would # fail because default cannot be computed even if the final value can! s3 = S3() - assert s3.dict(exclude_unset=True) == { + assert s3.model_dump(exclude_unset=True) == { "WEBSERVER_POSTGRES_DEFAULT_ENV": { "POSTGRES_HOST": "pg2", "POSTGRES_USER": "test2", @@ -275,7 +295,7 @@ def test_parse_compact_env(monkeypatch, model_classes_factory): """, ) s4 = S4() - assert s4.dict(exclude_unset=True) == { + assert s4.model_dump(exclude_unset=True) == { "WEBSERVER_POSTGRES_NULLABLE_DEFAULT_ENV": { "POSTGRES_HOST": "pg2", "POSTGRES_USER": "test2", @@ -292,7 +312,7 @@ def test_parse_compact_env(monkeypatch, model_classes_factory): """, ) s5 = S5() - assert s5.dict(exclude_unset=True) == { + assert s5.model_dump(exclude_unset=True) == { "WEBSERVER_POSTGRES_NULLABLE_DEFAULT_NULL": { "POSTGRES_HOST": "pg2", "POSTGRES_USER": "test2", @@ -302,7 +322,9 @@ def test_parse_compact_env(monkeypatch, model_classes_factory): } -def test_parse_from_mixed_envs(monkeypatch, model_classes_factory): +def test_parse_from_mixed_envs( + postgres_envvars_unset: None, monkeypatch, model_classes_factory +): S1, S2, S3, S4, S5 = model_classes_factory() @@ -326,7 +348,7 @@ def test_parse_from_mixed_envs(monkeypatch, model_classes_factory): s1 = S1() - assert s1.dict() == { + assert s1.model_dump() == { "WEBSERVER_POSTGRES": { "POSTGRES_HOST": "pg2", "POSTGRES_USER": "test2", @@ -341,7 +363,7 @@ def test_parse_from_mixed_envs(monkeypatch, model_classes_factory): # NOTE how unset marks also applies to embedded fields # NOTE: (1) priority of json-compact over granulated # NOTE: (2) json-compact did not define this but granulated did - assert s1.dict(exclude_unset=True) == { + assert s1.model_dump(exclude_unset=True) == { "WEBSERVER_POSTGRES": { "POSTGRES_HOST": "pg2", # <- (1) "POSTGRES_USER": "test2", # <- (1) @@ -358,7 +380,7 @@ def test_parse_from_mixed_envs(monkeypatch, model_classes_factory): ) s2 = S2() - assert s2.dict(exclude_unset=True) == { + assert s2.model_dump(exclude_unset=True) == { "WEBSERVER_POSTGRES_NULLABLE_OPTIONAL": { "POSTGRES_HOST": "pg2", "POSTGRES_USER": "test2", @@ -375,7 +397,7 @@ def test_parse_from_mixed_envs(monkeypatch, model_classes_factory): ) s3 = S3() - assert s3.dict(exclude_unset=True) == { + assert s3.model_dump(exclude_unset=True) == { "WEBSERVER_POSTGRES_DEFAULT_ENV": { "POSTGRES_HOST": "pg2", "POSTGRES_USER": "test2", @@ -392,7 +414,7 @@ def test_parse_from_mixed_envs(monkeypatch, model_classes_factory): ) s4 = S4() - assert s4.dict(exclude_unset=True) == { + assert s4.model_dump(exclude_unset=True) == { "WEBSERVER_POSTGRES_NULLABLE_DEFAULT_ENV": { "POSTGRES_HOST": "pg2", "POSTGRES_USER": "test2", @@ -409,7 +431,7 @@ def test_parse_from_mixed_envs(monkeypatch, model_classes_factory): ) s5 = S5() - assert s5.dict(exclude_unset=True) == { + assert s5.model_dump(exclude_unset=True) == { "WEBSERVER_POSTGRES_NULLABLE_DEFAULT_NULL": { "POSTGRES_HOST": "pg2", "POSTGRES_USER": "test2", @@ -436,7 +458,9 @@ def 
test_parse_from_mixed_envs(monkeypatch, model_classes_factory): # -def test_toggle_plugin_1(monkeypatch, model_classes_factory): +def test_toggle_plugin_1( + postgres_envvars_unset: None, monkeypatch, model_classes_factory +): *_, S4, S5 = model_classes_factory() @@ -449,7 +473,9 @@ def test_toggle_plugin_1(monkeypatch, model_classes_factory): assert s5.WEBSERVER_POSTGRES_NULLABLE_DEFAULT_NULL is None -def test_toggle_plugin_2(monkeypatch, model_classes_factory): +def test_toggle_plugin_2( + postgres_envvars_unset: None, monkeypatch, model_classes_factory +): *_, S4, S5 = model_classes_factory() # minimal @@ -470,7 +496,9 @@ def test_toggle_plugin_2(monkeypatch, model_classes_factory): assert s5.WEBSERVER_POSTGRES_NULLABLE_DEFAULT_NULL is None -def test_toggle_plugin_3(monkeypatch, model_classes_factory): +def test_toggle_plugin_3( + postgres_envvars_unset: None, monkeypatch, model_classes_factory +): *_, S4, S5 = model_classes_factory() # explicitly disables @@ -493,7 +521,9 @@ def test_toggle_plugin_3(monkeypatch, model_classes_factory): assert s5.WEBSERVER_POSTGRES_NULLABLE_DEFAULT_NULL is None -def test_toggle_plugin_4(monkeypatch, model_classes_factory): +def test_toggle_plugin_4( + postgres_envvars_unset: None, monkeypatch, model_classes_factory +): *_, S4, S5 = model_classes_factory() JSON_VALUE = '{"POSTGRES_HOST":"pg2", "POSTGRES_USER":"test2", "POSTGRES_PASSWORD":"shh2", "POSTGRES_DB":"db2"}' diff --git a/packages/settings-library/tests/test_email.py b/packages/settings-library/tests/test_email.py index 1cd3978503e..acb9d607c89 100644 --- a/packages/settings-library/tests/test_email.py +++ b/packages/settings-library/tests/test_email.py @@ -67,7 +67,7 @@ def all_env_devel_undefined( ], ) def test_smtp_configuration_ok(cfg: dict[str, Any], all_env_devel_undefined: None): - assert SMTPSettings.parse_obj(cfg) + assert SMTPSettings.model_validate(cfg) @pytest.mark.parametrize( diff --git a/packages/settings-library/tests/test_postgres.py b/packages/settings-library/tests/test_postgres.py index 1708acc7808..c191f0ea37e 100644 --- a/packages/settings-library/tests/test_postgres.py +++ b/packages/settings-library/tests/test_postgres.py @@ -14,23 +14,20 @@ def env_file(): def test_cached_property_dsn(mock_environment: dict): - settings = PostgresSettings() + settings = PostgresSettings() # type: ignore[call-arg] # all are upper-case - assert all(key == key.upper() for key in settings.dict()) - - # dsn is computed from the other fields - assert "dsn" not in settings.dict() - - # causes cached property to be computed and stored on the instance + assert all(key == key.upper() for key in settings.model_dump()) + assert settings.dsn - assert "dsn" in settings.dict() + # dsn is computed from the other fields + assert "dsn" not in settings.model_dump() def test_dsn_with_query(mock_environment: dict, monkeypatch): - settings = PostgresSettings() + settings = PostgresSettings() # type: ignore[call-arg] assert not settings.POSTGRES_CLIENT_NAME assert settings.dsn == "postgresql://foo:secret@localhost:5432/foodb" @@ -38,7 +35,7 @@ def test_dsn_with_query(mock_environment: dict, monkeypatch): # now with app monkeypatch.setenv("POSTGRES_CLIENT_NAME", "Some &43 funky name") - settings_with_app = PostgresSettings() + settings_with_app = PostgresSettings() # type: ignore[call-arg] assert settings_with_app.POSTGRES_CLIENT_NAME assert ( diff --git a/packages/settings-library/tests/test_twilio.py b/packages/settings-library/tests/test_twilio.py index 6f2830ea4aa..1989fbe6a9f 100644 --- 
a/packages/settings-library/tests/test_twilio.py +++ b/packages/settings-library/tests/test_twilio.py @@ -20,7 +20,7 @@ def test_twilio_settings_within_envdevel( }, ) settings = TwilioSettings.create_from_envs() - print(settings.json(indent=2)) + print(settings.model_dump_json(indent=2)) assert settings diff --git a/packages/settings-library/tests/test_utils_cli.py b/packages/settings-library/tests/test_utils_cli.py index 611ccf2509f..b4e50902124 100644 --- a/packages/settings-library/tests/test_utils_cli.py +++ b/packages/settings-library/tests/test_utils_cli.py @@ -18,10 +18,10 @@ from settings_library.utils_cli import ( create_settings_command, create_version_callback, + model_dump_with_secrets, print_as_envfile, print_as_json, ) -from settings_library.utils_encoders import create_json_encoder_wo_secrets from typer.testing import CliRunner log = logging.getLogger(__name__) @@ -84,12 +84,7 @@ def fake_granular_env_file_content() -> str: @pytest.fixture def export_as_dict() -> Callable: def _export(model_obj, **export_options): - return json.loads( - model_obj.json( - encoder=create_json_encoder_wo_secrets(model_obj.__class__), - **export_options, - ) - ) + return model_dump_with_secrets(model_obj, show_secrets=True, **export_options) return _export @@ -136,7 +131,7 @@ def test_settings_as_json( # reuse resulting json to build settings settings: dict = json.loads(result.stdout) - assert fake_settings_class.parse_obj(settings) + assert fake_settings_class.model_validate(settings) def test_settings_as_json_schema( @@ -439,7 +434,7 @@ class FakeSettings(BaseCustomSettings): assert "secret" not in captured.out assert "Some info" not in captured.out - print_as_json(settings_obj, compact=True) + print_as_json(settings_obj, compact=True, show_secrets=False) captured = capsys.readouterr() assert "secret" not in captured.out assert "**" in captured.out diff --git a/packages/settings-library/tests/test_utils_logging.py b/packages/settings-library/tests/test_utils_logging.py index 9054b391333..d7f4cf31569 100644 --- a/packages/settings-library/tests/test_utils_logging.py +++ b/packages/settings-library/tests/test_utils_logging.py @@ -1,6 +1,6 @@ import logging -from pydantic import Field, validator +from pydantic import AliasChoices, Field, field_validator from settings_library.base import BaseCustomSettings from settings_library.basic_types import BootMode from settings_library.utils_logging import MixinLoggingSettings @@ -14,20 +14,20 @@ def test_mixin_logging(monkeypatch): class Settings(BaseCustomSettings, MixinLoggingSettings): # DOCKER - SC_BOOT_MODE: BootMode | None + SC_BOOT_MODE: BootMode | None = None # LOGGING LOG_LEVEL: str = Field( "WARNING", - env=[ + validation_alias=AliasChoices( "APPNAME_LOG_LEVEL", "LOG_LEVEL", - ], + ), ) APPNAME_DEBUG: bool = Field(False, description="Starts app in debug mode") - @validator("LOG_LEVEL") + @field_validator("LOG_LEVEL") @classmethod def _v(cls, value) -> str: return cls.validate_log_level(value) @@ -40,14 +40,10 @@ def _v(cls, value) -> str: assert settings.LOG_LEVEL == "DEBUG" assert ( - settings.json() - == '{"SC_BOOT_MODE": null, "LOG_LEVEL": "DEBUG", "APPNAME_DEBUG": false}' + settings.model_dump_json() + == '{"SC_BOOT_MODE":null,"LOG_LEVEL":"DEBUG","APPNAME_DEBUG":false}' ) # test cached-property assert settings.log_level == logging.DEBUG - # log_level is cached-property (notice that is lower-case!), and gets added after first use - assert ( - settings.json() - == '{"SC_BOOT_MODE": null, "LOG_LEVEL": "DEBUG", "APPNAME_DEBUG": false, 
"log_level": 10}' - ) + diff --git a/packages/settings-library/tests/test_utils_service.py b/packages/settings-library/tests/test_utils_service.py index a3638f9b31e..8ecd9835893 100644 --- a/packages/settings-library/tests/test_utils_service.py +++ b/packages/settings-library/tests/test_utils_service.py @@ -5,7 +5,7 @@ from functools import cached_property import pytest -from pydantic import AnyHttpUrl, parse_obj_as +from pydantic import AnyHttpUrl, TypeAdapter from pydantic.types import SecretStr from settings_library.base import BaseCustomSettings from settings_library.basic_types import PortInt, VersionTag @@ -24,9 +24,9 @@ class MySettings(BaseCustomSettings, MixinServiceSettings): MY_VTAG: VersionTag | None = None MY_SECURE: bool = False - # optional - MY_USER: str | None - MY_PASSWORD: SecretStr | None + # optional (in Pydantic v2 requires a default) + MY_USER: str | None = None + MY_PASSWORD: SecretStr | None = None @cached_property def api_base_url(self) -> str: @@ -88,8 +88,8 @@ def test_service_settings_base_urls(service_settings_cls: type): settings_with_defaults = service_settings_cls() - base_url = parse_obj_as(AnyHttpUrl, settings_with_defaults.base_url) - api_base_url = parse_obj_as(AnyHttpUrl, settings_with_defaults.api_base_url) + base_url = TypeAdapter(AnyHttpUrl).validate_python(settings_with_defaults.base_url) + api_base_url = TypeAdapter(AnyHttpUrl).validate_python(settings_with_defaults.api_base_url) assert base_url.path != api_base_url.path assert (base_url.scheme, base_url.host, base_url.port) == ( diff --git a/packages/simcore-sdk/requirements/_base.txt b/packages/simcore-sdk/requirements/_base.txt index 14712a97fc2..28d31ff4fb2 100644 --- a/packages/simcore-sdk/requirements/_base.txt +++ b/packages/simcore-sdk/requirements/_base.txt @@ -31,6 +31,8 @@ aiosignal==1.3.1 # via aiohttp alembic==1.13.2 # via -r requirements/../../../packages/postgres-database/requirements/_base.in +annotated-types==0.7.0 + # via pydantic anyio==4.4.0 # via # fast-depends @@ -44,10 +46,8 @@ arrow==1.3.0 # -r requirements/../../../packages/service-library/requirements/_base.in async-timeout==4.0.3 # via - # aiohttp # aiopg # asyncpg - # redis asyncpg==0.29.0 # via sqlalchemy attrs==24.2.0 @@ -61,8 +61,6 @@ dnspython==2.6.1 # via email-validator email-validator==2.2.0 # via pydantic -exceptiongroup==1.2.2 - # via anyio fast-depends==2.4.8 # via faststream faststream==0.5.18 @@ -129,7 +127,7 @@ psycopg2-binary==2.9.9 # via # aiopg # sqlalchemy -pydantic==1.10.17 +pydantic==2.9.2 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt @@ -146,12 +144,28 @@ pydantic==1.10.17 # -r requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/_base.in # fast-depends + # pydantic-extra-types + # pydantic-settings +pydantic-core==2.23.4 + # via pydantic +pydantic-extra-types==2.9.0 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in +pydantic-settings==2.5.2 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r 
requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in pygments==2.18.0 # via rich pyinstrument==4.7.2 # via -r requirements/../../../packages/service-library/requirements/_base.in python-dateutil==2.9.0.post0 # via arrow +python-dotenv==1.0.1 + # via pydantic-settings pyyaml==6.0.2 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -225,12 +239,12 @@ typing-extensions==4.12.2 # via # aiodebug # alembic - # anyio # faststream # flexcache # flexparser # pint # pydantic + # pydantic-core # typer yarl==1.9.4 # via diff --git a/packages/simcore-sdk/requirements/_test.txt b/packages/simcore-sdk/requirements/_test.txt index db22f856cee..fa838e38766 100644 --- a/packages/simcore-sdk/requirements/_test.txt +++ b/packages/simcore-sdk/requirements/_test.txt @@ -28,12 +28,12 @@ alembic==1.13.2 # via # -c requirements/_base.txt # -r requirements/_test.in -antlr4-python3-runtime==4.13.2 - # via moto -async-timeout==4.0.3 +annotated-types==0.7.0 # via # -c requirements/_base.txt - # aiohttp + # pydantic +antlr4-python3-runtime==4.13.2 + # via moto attrs==24.2.0 # via # -c requirements/_base.txt @@ -88,10 +88,6 @@ docker==7.1.0 # via # -r requirements/_test.in # moto -exceptiongroup==1.2.2 - # via - # -c requirements/_base.txt - # pytest execnet==2.1.1 # via pytest-xdist faker==27.0.0 @@ -207,11 +203,15 @@ py-partiql-parser==0.5.5 # via moto pycparser==2.22 # via cffi -pydantic==1.10.17 +pydantic==2.9.2 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # aws-sam-translator +pydantic-core==2.23.4 + # via + # -c requirements/_base.txt + # pydantic pyparsing==3.1.2 # via moto pytest==8.3.2 @@ -249,7 +249,9 @@ python-dateutil==2.9.0.post0 # faker # moto python-dotenv==1.0.1 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in pyyaml==6.0.2 # via # -c requirements/../../../requirements/constraints.txt @@ -304,11 +306,6 @@ sympy==1.13.2 # via cfn-lint termcolor==2.4.0 # via pytest-sugar -tomli==2.0.1 - # via - # coverage - # mypy - # pytest types-aiobotocore==2.13.2 # via -r requirements/_test.in types-aiobotocore-s3==2.13.2 @@ -327,6 +324,7 @@ typing-extensions==4.12.2 # cfn-lint # mypy # pydantic + # pydantic-core # sqlalchemy2-stubs # types-aiobotocore # types-aiobotocore-s3 diff --git a/packages/simcore-sdk/requirements/_tools.txt b/packages/simcore-sdk/requirements/_tools.txt index 8ca413e037f..5a573bd4848 100644 --- a/packages/simcore-sdk/requirements/_tools.txt +++ b/packages/simcore-sdk/requirements/_tools.txt @@ -76,22 +76,12 @@ setuptools==73.0.1 # via # -c requirements/_test.txt # pip-tools -tomli==2.0.1 - # via - # -c requirements/_test.txt - # black - # build - # mypy - # pip-tools - # pylint tomlkit==0.13.2 # via pylint typing-extensions==4.12.2 # via # -c requirements/_base.txt # -c requirements/_test.txt - # astroid - # black # mypy virtualenv==20.26.3 # via pre-commit diff --git a/packages/simcore-sdk/src/simcore_sdk/node_data/data_manager.py b/packages/simcore-sdk/src/simcore_sdk/node_data/data_manager.py index 7b8b810ba38..7579c3eeb0c 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_data/data_manager.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_data/data_manager.py @@ -6,7 +6,7 @@ from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID, StorageFileID from 
models_library.users import UserID -from pydantic import ByteSize, parse_obj_as +from pydantic import ByteSize, TypeAdapter from servicelib.archiving_utils import unarchive_dir from servicelib.logging_utils import log_context from servicelib.progress_bar import ProgressBarData @@ -25,7 +25,9 @@ def __create_s3_object_key( project_id: ProjectID, node_uuid: NodeID, file_path: Path | str ) -> StorageFileID: file_name = file_path.name if isinstance(file_path, Path) else file_path - return parse_obj_as(StorageFileID, f"{project_id}/{node_uuid}/{file_name}") # type: ignore[arg-type] + return TypeAdapter(StorageFileID).validate_python( + f"{project_id}/{node_uuid}/{file_name}" + ) def __get_s3_name(path: Path, *, is_archive: bool) -> str: diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/_filemanager.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/_filemanager.py index 7b5467c2851..a5305dd5b93 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/_filemanager.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/_filemanager.py @@ -14,7 +14,7 @@ from models_library.projects_nodes_io import LocationID, LocationName from models_library.users import UserID from models_library.utils.fastapi_encoders import jsonable_encoder -from pydantic import AnyUrl, parse_obj_as +from pydantic import AnyUrl, TypeAdapter from settings_library.node_ports import NodePortsSettings from tenacity.asyncio import AsyncRetrying from tenacity.before_sleep import before_sleep_log @@ -42,7 +42,7 @@ async def _get_location_id_from_location_name( raise exceptions.S3InvalidStore(store) -def _get_https_link_if_storage_secure(url: AnyUrl) -> str: +def _get_https_link_if_storage_secure(url: str) -> str: # NOTE: links generated by storage are http only. 
# WEBSERVER -> STORAGE (http requests) # DY-SIDECAR (simcore-sdk) -> STORAGE (httpS requests) @@ -69,18 +69,18 @@ async def _complete_upload( :rtype: ETag """ async with session.post( - _get_https_link_if_storage_secure(upload_completion_link), + _get_https_link_if_storage_secure(str(upload_completion_link)), json=jsonable_encoder(FileUploadCompletionBody(parts=parts)), auth=get_basic_auth(), ) as resp: resp.raise_for_status() # now poll for state - file_upload_complete_response = parse_obj_as( - Envelope[FileUploadCompleteResponse], await resp.json() + file_upload_complete_response = TypeAdapter(Envelope[FileUploadCompleteResponse]).validate_python( + await resp.json() ) assert file_upload_complete_response.data # nosec state_url = _get_https_link_if_storage_secure( - file_upload_complete_response.data.links.state + str(file_upload_complete_response.data.links.state) ) _logger.info("completed upload of %s", f"{len(parts)} parts, received {state_url}") @@ -96,8 +96,8 @@ async def _complete_upload( with attempt: async with session.post(state_url, auth=get_basic_auth()) as resp: resp.raise_for_status() - future_enveloped = parse_obj_as( - Envelope[FileUploadCompleteFutureResponse], await resp.json() + future_enveloped = TypeAdapter(Envelope[FileUploadCompleteFutureResponse]).validate_python( + await resp.json() ) assert future_enveloped.data # nosec if future_enveloped.data.state == FileUploadCompleteState.NOK: @@ -142,7 +142,7 @@ async def _abort_upload( # abort the upload correctly, so it can revert back to last version try: async with session.post( - _get_https_link_if_storage_secure(abort_upload_link), auth=get_basic_auth() + _get_https_link_if_storage_secure(str(abort_upload_link)), auth=get_basic_auth() ) as resp: resp.raise_for_status() except ClientError: diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/aws_s3_cli.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/aws_s3_cli.py index 35d1d7c71f8..320cfd7e25f 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/aws_s3_cli.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/aws_s3_cli.py @@ -6,10 +6,11 @@ from asyncio.streams import StreamReader from pathlib import Path +from common_library.errors_classes import OsparcErrorMixin + from aiocache import cached # type: ignore[import-untyped] from models_library.basic_types import IDStr from pydantic import AnyUrl, ByteSize -from pydantic.errors import PydanticErrorMixin from servicelib.progress_bar import ProgressBarData from servicelib.utils import logged_gather from settings_library.aws_s3_cli import AwsS3CliSettings @@ -24,7 +25,7 @@ _OSPARC_SYMLINK_EXTENSION = ".rclonelink" # named `rclonelink` to maintain backwards -class BaseAwsS3CliError(PydanticErrorMixin, RuntimeError): +class BaseAwsS3CliError(OsparcErrorMixin, RuntimeError): ... 
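The last two files repeat the canonical v1-to-v2 substitution used throughout this PR: module-level `parse_obj_as(T, v)` becomes `TypeAdapter(T).validate_python(v)`, and because Pydantic v2 URL types no longer subclass `str`, anything handed to aiohttp is first wrapped in `str(...)`. A minimal sketch of the pattern, reusing a single adapter instance as the settings modules above do (the function name and URL are illustrative, not part of the PR):

from pydantic import AnyUrl, TypeAdapter

_ANY_URL_ADAPTER: TypeAdapter[AnyUrl] = TypeAdapter(AnyUrl)

def as_https_link(url: str) -> str:
    # validate first, then convert back to a plain str for the HTTP client;
    # in Pydantic v2, AnyUrl is a dedicated type, no longer a str subclass
    validated = _ANY_URL_ADAPTER.validate_python(url)
    return str(validated).replace("http://", "https://", 1)

assert as_https_link("http://storage:8080/v0/locations").startswith("https://")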
diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/file_io_utils.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/file_io_utils.py index 695b710c8f8..5feefab82f8 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/file_io_utils.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/file_io_utils.py @@ -253,7 +253,7 @@ def _check_for_aws_http_errors(exc: BaseException) -> bool: async def _session_put( session: ClientSession, file_part_size: int, - upload_url: AnyUrl, + upload_url: str, pbar: tqdm, io_log_redirect_cb: LogRedirectCB | None, progress_bar: ProgressBarData, @@ -314,7 +314,7 @@ async def _upload_file_part( received_e_tag = await _session_put( session=session, file_part_size=file_part_size, - upload_url=upload_url, + upload_url=str(upload_url), pbar=pbar, io_log_redirect_cb=io_log_redirect_cb, progress_bar=progress_bar, diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/filemanager.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/filemanager.py index 6a5609c7eb5..f3e2587fab7 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/filemanager.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/filemanager.py @@ -15,7 +15,7 @@ from models_library.basic_types import IDStr, SHA256Str from models_library.projects_nodes_io import LocationID, LocationName, StorageFileID from models_library.users import UserID -from pydantic import AnyUrl, ByteSize, parse_obj_as +from pydantic import AnyUrl, ByteSize, TypeAdapter, parse_obj_as from servicelib.file_utils import create_sha256_checksum from servicelib.progress_bar import ProgressBarData from settings_library.aws_s3_cli import AwsS3CliSettings @@ -189,14 +189,16 @@ async def download_path_from_s3( aws_s3_cli_settings, progress_bar, local_directory_path=local_path, - download_s3_link=parse_obj_as(AnyUrl, f"{download_link}"), + download_s3_link=TypeAdapter(AnyUrl).validate_python(f"{download_link}"), ) elif r_clone_settings: await r_clone.sync_s3_to_local( r_clone_settings, progress_bar, local_directory_path=local_path, - download_s3_link=parse_obj_as(AnyUrl, f"{download_link}"), + download_s3_link=str( + TypeAdapter(AnyUrl).validate_python(f"{download_link}") + ), ) else: msg = "Unexpected configuration" diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone.py index 18e15139493..bbfe14e7f39 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone.py @@ -8,11 +8,12 @@ from pathlib import Path from typing import Final +from common_library.errors_classes import OsparcErrorMixin + from aiocache import cached # type: ignore[import-untyped] from aiofiles import tempfile from models_library.basic_types import IDStr from pydantic import AnyUrl, BaseModel, ByteSize -from pydantic.errors import PydanticErrorMixin from servicelib.progress_bar import ProgressBarData from servicelib.utils import logged_gather from settings_library.r_clone import RCloneSettings @@ -31,7 +32,7 @@ _logger = logging.getLogger(__name__) -class BaseRCloneError(PydanticErrorMixin, RuntimeError): +class BaseRCloneError(OsparcErrorMixin, RuntimeError): ... 
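Both sidecar helpers swap `pydantic.errors.PydanticErrorMixin`, which was dropped in Pydantic v2, for `common_library.errors_classes.OsparcErrorMixin`. Assuming the new mixin keeps the v1-style contract (a `msg_template` class attribute formatted from the keyword arguments passed at raise time), error classes stay declarative; the subclass below is purely illustrative:

from common_library.errors_classes import OsparcErrorMixin

class _DemoCloneError(OsparcErrorMixin, RuntimeError):
    # hypothetical error for illustration; the real ones live in r_clone.py / aws_s3_cli.py
    msg_template = "command {command} exited with code {return_code}"

try:
    raise _DemoCloneError(command="rclone sync", return_code=1)
except _DemoCloneError as err:
    assert "rclone sync" in f"{err}"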
@@ -166,7 +167,7 @@ async def _get_folder_size( cwd=f"{local_dir.resolve()}", ) - rclone_folder_size_result = _RCloneSize.parse_raw(result) + rclone_folder_size_result = _RCloneSize.model_validate_json(result) _logger.debug( "RClone size call for %s: %s", f"{folder}", f"{rclone_folder_size_result}" ) @@ -259,7 +260,7 @@ async def sync_local_to_s3( """ _raise_if_directory_is_file(local_directory_path) - upload_s3_path = re.sub(r"^s3://", "", upload_s3_link) + upload_s3_path = re.sub(r"^s3://", "", str(upload_s3_link)) _logger.debug(" %s; %s", f"{upload_s3_link=}", f"{upload_s3_path=}") await _sync_sources( @@ -279,7 +280,7 @@ async def sync_s3_to_local( progress_bar: ProgressBarData, *, local_directory_path: Path, - download_s3_link: AnyUrl, + download_s3_link: str, exclude_patterns: set[str] | None = None, debug_logs: bool = False, ) -> None: diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone_utils.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone_utils.py index f539e451026..75ed54ec686 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone_utils.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone_utils.py @@ -3,7 +3,7 @@ from typing import Union from models_library.utils.change_case import snake_to_camel -from pydantic import BaseModel, ByteSize, Field, parse_raw_as +from pydantic import BaseModel, ByteSize, ConfigDict, Field, TypeAdapter from servicelib.logging_utils import log_catch from servicelib.progress_bar import ProgressBarData @@ -12,6 +12,8 @@ _logger = logging.getLogger(__name__) + + class _RCloneSyncMessageBase(BaseModel): level: str = Field(..., description="log level") msg: str @@ -31,9 +33,7 @@ class _RCloneSyncTransferCompletedMessage(_RCloneSyncMessageBase): class _RCloneSyncTransferringStats(BaseModel): bytes: ByteSize total_bytes: ByteSize - - class Config: - alias_generator = snake_to_camel + model_config = ConfigDict(alias_generator=snake_to_camel) class _RCloneSyncTransferringMessage(_RCloneSyncMessageBase): @@ -77,8 +77,7 @@ def __init__(self, progress_bar: ProgressBarData) -> None: async def __call__(self, logs: str) -> None: _logger.debug("received logs: %s", logs) with log_catch(_logger, reraise=False): - rclone_message: _RCloneSyncMessages = parse_raw_as( - _RCloneSyncMessages, # type: ignore[arg-type] + rclone_message: _RCloneSyncMessages = TypeAdapter(_RCloneSyncMessages).validate_strings( logs, ) diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/storage_client.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/storage_client.py index c249cbcf830..b7a394a6dbd 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/storage_client.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/storage_client.py @@ -144,7 +144,7 @@ async def get_storage_locations( expected_status=status.HTTP_200_OK, params={"user_id": f"{user_id}"}, ) as response: - locations_enveloped = Envelope[FileLocationArray].parse_obj( + locations_enveloped = Envelope[FileLocationArray].model_validate( await response.json() ) if locations_enveloped.data is None: @@ -173,7 +173,7 @@ async def get_download_file_link( expected_status=status.HTTP_200_OK, params={"user_id": f"{user_id}", "link_type": link_type.value}, ) as response: - presigned_link_enveloped = Envelope[PresignedLink].parse_obj( + presigned_link_enveloped = Envelope[PresignedLink].model_validate( await response.json() ) if not presigned_link_enveloped.data or not presigned_link_enveloped.data.link: @@ 
-215,7 +215,7 @@ async def get_upload_file_links( expected_status=status.HTTP_200_OK, params=query_params, ) as response: - file_upload_links_enveloped = Envelope[FileUploadSchema].parse_obj( + file_upload_links_enveloped = Envelope[FileUploadSchema].model_validate( await response.json() ) if file_upload_links_enveloped.data is None: @@ -245,7 +245,7 @@ async def get_file_metadata( # NOTE: keeps backwards compatibility raise exceptions.S3InvalidPathError(file_id) - file_metadata_enveloped = Envelope[FileMetaDataGet].parse_obj(payload) + file_metadata_enveloped = Envelope[FileMetaDataGet].model_validate(payload) assert file_metadata_enveloped.data # nosec return file_metadata_enveloped.data @@ -265,7 +265,7 @@ async def list_file_metadata( expected_status=status.HTTP_200_OK, params={"user_id": f"{user_id}", "uuid_filter": uuid_filter}, ) as resp: - envelope = Envelope[list[FileMetaDataGet]].parse_obj(await resp.json()) + envelope = Envelope[list[FileMetaDataGet]].model_validate(await resp.json()) assert envelope.data is not None # nosec file_meta_data: list[FileMetaDataGet] = envelope.data return file_meta_data diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/links.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/links.py index d8eb1d99349..ad94884c3b0 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/links.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/links.py @@ -4,18 +4,27 @@ from models_library.basic_regex import UUID_RE from models_library.projects_nodes_io import BaseFileLink, DownloadLink from models_library.projects_nodes_io import PortLink as BasePortLink -from pydantic import AnyUrl, Extra, Field, StrictBool, StrictFloat, StrictInt, StrictStr +from pydantic import ( + AnyUrl, + ConfigDict, + Field, + StrictBool, + StrictFloat, + StrictInt, + StrictStr, +) class PortLink(BasePortLink): - node_uuid: str = Field(..., regex=UUID_RE, alias="nodeUuid") # type: ignore[assignment] # This overrides the base class it is ugly but needs its own PR to fix it + node_uuid: str = Field(..., pattern=UUID_RE, alias="nodeUuid") # type: ignore[assignment] # This overrides the base class it is ugly but needs its own PR to fix it class FileLink(BaseFileLink): """allow all kind of file links""" - class Config: - extra = Extra.allow + model_config = ConfigDict( + extra="allow", + ) # TODO: needs to be in sync with project_nodes.InputTypes and project_nodes.OutputTypes diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py index 8c78e28a066..d0ef9eb14bf 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py @@ -1,16 +1,19 @@ +from asyncio import Task +import traceback import logging from collections.abc import Callable, Coroutine from pathlib import Path from typing import Any +from pydantic_core import InitErrorDetails + from models_library.api_schemas_storage import LinkType from models_library.basic_types import IDStr from models_library.projects import ProjectIDStr from models_library.projects_nodes_io import NodeIDStr from models_library.services_types import ServicePortKey from models_library.users import UserID -from pydantic import BaseModel, Field, ValidationError -from pydantic.error_wrappers import flatten_errors +from pydantic import BaseModel, ConfigDict, Field, ValidationError from servicelib.progress_bar import ProgressBarData from servicelib.utils 
import logged_gather
 from settings_library.aws_s3_cli import AwsS3CliSettings
@@ -27,6 +30,16 @@
 log = logging.getLogger(__name__)

+def _format_error(task: Task) -> str:
+    # pylint:disable=protected-access
+    assert task._exception  # nosec
+    error_list = traceback.format_exception(type(task._exception), task._exception, task._exception.__traceback__)
+    return "\n".join(error_list)
+
+def _get_error_details(task: Task, port_key: str) -> InitErrorDetails:
+    # pylint:disable=protected-access
+    return InitErrorDetails(type="value_error", loc=(f"{port_key}",), input=_format_error(task), ctx={"error": task._exception})
+
 class Nodeports(BaseModel):
     """
     Represents a node in a project and all its input/output ports
@@ -47,9 +60,9 @@ class Nodeports(BaseModel):
     r_clone_settings: RCloneSettings | None = None
     io_log_redirect_cb: LogRedirectCB | None
     aws_s3_cli_settings: AwsS3CliSettings | None = None
-
-    class Config:
-        arbitrary_types_allowed = True
+    model_config = ConfigDict(
+        arbitrary_types_allowed=True,
+    )

     def __init__(self, **data: Any):
         super().__init__(**data)
@@ -181,9 +194,9 @@ async def set_multiple(
             await self.save_to_db_cb(self)

         # groups all ValidationErrors pre-pending 'port_key' to loc and raises ValidationError
-        if errors := [
-            list(flatten_errors([r], self.__config__, loc=(f"{port_key}",)))
+        if error_details := [
+            _get_error_details(r, port_key)
             for port_key, r in zip(port_values.keys(), results)
-            if isinstance(r, ValidationError)
+            if r is not None
         ]:
-            raise ValidationError(errors, model=type(self))
+            raise ValidationError.from_exception_data(title="Multiple port_key errors", line_errors=error_details)
diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py
index 2338563dcdb..3ddab6a29d3 100644
--- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py
+++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py
@@ -10,8 +10,16 @@
 from models_library.basic_types import IDStr
 from models_library.services_io import BaseServiceIOModel
 from models_library.services_types import ServicePortKey
-from pydantic import AnyUrl, Field, PrivateAttr, ValidationError, validator
-from pydantic.tools import parse_obj_as
+from pydantic import (
+    AnyUrl,
+    ConfigDict,
+    Field,
+    PrivateAttr,
+    TypeAdapter,
+    ValidationError,
+    ValidationInfo,
+    field_validator,
+)
 from servicelib.progress_bar import ProgressBarData

 from ..node_ports_common.exceptions import (
@@ -56,7 +64,7 @@ def _check_if_symlink_is_valid(symlink: Path) -> None:
 def can_parse_as(v, *types) -> bool:
     try:
         for type_ in types:
-            parse_obj_as(type_, v)
+            TypeAdapter(type_).validate_python(v)
         return True
     except ValidationError:
         return False
@@ -70,17 +78,23 @@ class SetKWargs:
 class Port(BaseServiceIOModel):
     key: ServicePortKey
     widget: dict[str, Any] | None = None
-    default_value: DataItemValue | None = Field(None, alias="defaultValue")
+    default_value: DataItemValue | None = Field(
+        None, alias="defaultValue", union_mode="left_to_right"
+    )

-    value: DataItemValue | None = None
+    value: DataItemValue | None = Field(
+        None, validate_default=True, union_mode="left_to_right"
+    )

     # Different states of "value"
     # - e.g. typically after resolving a port's link, a download link, ...
diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py index 2338563dcdb..3ddab6a29d3 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py @@ -10,8 +10,16 @@ from models_library.basic_types import IDStr from models_library.services_io import BaseServiceIOModel from models_library.services_types import ServicePortKey -from pydantic import AnyUrl, Field, PrivateAttr, ValidationError, validator -from pydantic.tools import parse_obj_as +from pydantic import ( + AnyUrl, + ConfigDict, + Field, + PrivateAttr, + TypeAdapter, + ValidationError, + ValidationInfo, + field_validator, +) from servicelib.progress_bar import ProgressBarData from ..node_ports_common.exceptions import ( @@ -56,7 +64,7 @@ def _check_if_symlink_is_valid(symlink: Path) -> None: def can_parse_as(v, *types) -> bool: try: for type_ in types: - parse_obj_as(type_, v) + TypeAdapter(type_).validate_python(v) return True except ValidationError: return False @@ -70,17 +78,23 @@ class SetKWargs: class Port(BaseServiceIOModel): key: ServicePortKey widget: dict[str, Any] | None = None - default_value: DataItemValue | None = Field(None, alias="defaultValue") + default_value: DataItemValue | None = Field( + None, alias="defaultValue", union_mode="left_to_right" + ) - value: DataItemValue | None = None + value: DataItemValue | None = Field( + None, validate_default=True, union_mode="left_to_right" + ) # Different states of "value" # - e.g. typically after resolving a port's link, a download link, ... # - lazy evaluation using get_* members # - used to run validation & conversion of resolved PortContentTypes values # - excluded from all model export - value_item: ItemValue | None = Field(None, exclude=True) - value_concrete: ItemConcreteValue | None = Field(None, exclude=True) + value_item: ItemValue | None = Field(None, exclude=True, union_mode="left_to_right") + value_concrete: ItemConcreteValue | None = Field( + None, exclude=True, union_mode="left_to_right" + ) # Function to convert from ItemValue -> ItemConcreteValue _py_value_converter: Callable[[Any], ItemConcreteValue] = PrivateAttr() @@ -90,15 +104,14 @@ class Port(BaseServiceIOModel): # flags _used_default_value: bool = PrivateAttr(False) - class Config(BaseServiceIOModel.Config): - validate_assignment = True + model_config = ConfigDict(validate_assignment=True) - @validator("value", always=True) + @field_validator("value") @classmethod - def check_value(cls, v: DataItemValue, values: dict[str, Any]) -> DataItemValue: + def check_value(cls, v: DataItemValue, info: ValidationInfo) -> DataItemValue: if ( v is not None - and (property_type := values.get("property_type")) + and (property_type := info.data.get("property_type")) and not isinstance(v, PortLink) ): if port_utils.is_file_type(property_type): @@ -108,10 +121,10 @@ def check_value(cls, v: DataItemValue, values: dict[str, Any]) -> DataItemValue: ) elif property_type == "ref_contentSchema": v, _ = validate_port_content( - port_key=values.get("key"), + port_key=info.data.get("key"), value=v, unit=None, - content_schema=values.get("content_schema", {}), + content_schema=info.data.get("content_schema", {}), ) elif isinstance(v, (list, dict)): raise TypeError( @@ -119,21 +132,21 @@ def check_value(cls, v: DataItemValue, values: dict[str, Any]) -> DataItemValue: ) return v - @validator("value_item", "value_concrete", pre=True) + @field_validator("value_item", "value_concrete", mode="before") @classmethod - def check_item_or_concrete_value(cls, v, values): + def check_item_or_concrete_value(cls, v, info: ValidationInfo): if ( v - and v != values["value"] - and (property_type := values.get("property_type")) + and v != info.data["value"] + and (property_type := info.data.get("property_type")) and property_type == "ref_contentSchema" and not can_parse_as(v, Path, AnyUrl) ): v, _ = validate_port_content( - port_key=values.get("key"), + port_key=info.data.get("key"), value=v, unit=None, - content_schema=values.get("content_schema", {}), + content_schema=info.data.get("content_schema", {}), ) return v @@ -209,7 +222,9 @@ async def _evaluate() -> ItemValue | None: if isinstance(self.value, DownloadLink): # generic download link for a file - url: AnyUrl = self.value.download_link + url: AnyUrl = TypeAdapter(AnyUrl).validate_python( + self.value.download_link + ) return url # otherwise, this is a BasicValueTypes
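The port.py hunk completes the validator migration: @validator("value", always=True) becomes @field_validator("value") with validate_default=True moved onto the field, pre=True becomes mode="before", and the old values dict is reached through ValidationInfo.data. A small sketch of this pattern under the same assumptions; ExamplePort and its fields are illustrative only:

from pydantic import BaseModel, Field, ValidationInfo, field_validator

class ExamplePort(BaseModel):
    property_type: str
    # validate_default=True replaces v1's always=True: the validator also
    # runs when the default (None) is used
    value: int | None = Field(None, validate_default=True)

    @field_validator("value")
    @classmethod
    def check_value(cls, v: int | None, info: ValidationInfo) -> int | None:
        # info.data holds the already-validated fields declared above,
        # replacing the v1 values dict
        if v is not None and info.data.get("property_type") == "integer" and v < 0:
            raise ValueError("integer ports must be non-negative")
        return v

assert ExamplePort(property_type="integer", value=3).value == 3
assert ExamplePort(property_type="integer").value is None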
diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port_validation.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port_validation.py index c2ebb56986d..b33e677c0bf 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port_validation.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port_validation.py @@ -1,7 +1,8 @@ import logging import re -from typing import Any, Dict, Optional, Tuple +from typing import Any +from common_library.errors_classes import OsparcErrorMixin from models_library.projects_nodes import UnitStr from models_library.utils.json_schema import ( JsonSchemaValidationError, jsonschema_validate_schema, ) from pint import PintError, UnitRegistry -from pydantic.errors import PydanticValueError -JsonSchemaDict = Dict[str, Any] +JsonSchemaDict = dict[str, Any] log = logging.getLogger(__name__) @@ -22,8 +22,8 @@ # - Use 'code' to discriminate port_validation errors -class PortValueError(PydanticValueError): - code = "port_validation.schema_error" +class PortValueError(OsparcErrorMixin, ValueError): + code = "port_validation.schema_error" # type: ignore msg_template = "Invalid value in port {port_key!r}: {schema_error_message}" # pylint: disable=useless-super-delegation @@ -37,8 +37,8 @@ def __init__(self, *, port_key: str, schema_error: JsonSchemaValidationError): ) -class PortUnitError(PydanticValueError): - code = "port_validation.unit_error" +class PortUnitError(OsparcErrorMixin, ValueError): + code = "port_validation.unit_error" # type: ignore msg_template = "Invalid unit in port {port_key!r}: {pint_error_msg}" # pylint: disable=useless-super-delegation @@ -72,7 +72,7 @@ def _validate_port_value(value, content_schema: JsonSchemaDict): def _validate_port_unit( value, unit, content_schema: JsonSchemaDict, *, ureg: UnitRegistry -) -> Tuple[Any, Optional[UnitStr]]: +) -> tuple[Any, UnitStr | None]: """ - Checks valid 'value' against content_schema - Converts 'value' with 'unit' to unit expected in content_schema @@ -101,7 +101,7 @@ def _validate_port_unit( def validate_port_content( port_key, value: Any, - unit: Optional[UnitStr], + unit: UnitStr | None, content_schema: JsonSchemaDict, ): """A port content is all datasets injected to a given port. Currently only diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/ports_mapping.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/ports_mapping.py index 2855e8a253e..9fb13510afb 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/ports_mapping.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/ports_mapping.py @@ -1,38 +1,35 @@ from collections.abc import ItemsView, Iterator, KeysView, ValuesView from models_library.services_types import ServicePortKey -from pydantic import BaseModel +from pydantic import RootModel from ..node_ports_common.exceptions import UnboundPortError from .port import Port -class BasePortsMapping(BaseModel): - __root__: dict[ServicePortKey, Port] - +class BasePortsMapping(RootModel[dict[ServicePortKey, Port]]): def __getitem__(self, key: int | ServicePortKey) -> Port: - if isinstance(key, int): - if key < len(self.__root__): - key = list(self.__root__.keys())[key] - if key not in self.__root__: + if isinstance(key, int) and key < len(self.root): + key = list(self.root.keys())[key] + if key not in self.root: raise UnboundPortError(key) assert isinstance(key, str) # nosec - return self.__root__[key] + return self.root[key] def __iter__(self) -> Iterator[ServicePortKey]: # type: ignore - return iter(self.__root__) + return iter(self.root) def keys(self) -> KeysView[ServicePortKey]: - return self.__root__.keys() + return self.root.keys() def items(self) -> ItemsView[ServicePortKey, Port]: - return self.__root__.items() + return self.root.items() def values(self) -> ValuesView[Port]: - return self.__root__.values() + return self.root.values() def __len__(self) -> int: - return self.__root__.__len__() + return self.root.__len__() class InputsList(BasePortsMapping): diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/serialization_v2.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/serialization_v2.py index daa4c9aaa3e..f4d74711e18 100644 ---
a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/serialization_v2.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/serialization_v2.py @@ -115,7 +115,7 @@ async def dump(nodeports: Nodeports) -> None: "dumping node_ports_v2 object %s", pformat(nodeports, indent=2), ) - _nodeports_cfg = nodeports.dict( + _nodeports_cfg = nodeports.model_dump( include={"internal_inputs", "internal_outputs"}, by_alias=True, exclude_unset=True, diff --git a/packages/simcore-sdk/tests/helpers/utils_port_v2.py b/packages/simcore-sdk/tests/helpers/utils_port_v2.py index 556e0eb4ced..23298f6b175 100644 --- a/packages/simcore-sdk/tests/helpers/utils_port_v2.py +++ b/packages/simcore-sdk/tests/helpers/utils_port_v2.py @@ -45,5 +45,5 @@ def create_valid_port_mapping( key=key_for_file_port, fileToKeyMap={file_to_key: key_for_file_port} if file_to_key else None, ) - port_mapping = mapping_class(**{"__root__": port_cfgs}) + port_mapping = mapping_class(**{"root": port_cfgs}) return port_mapping diff --git a/packages/simcore-sdk/tests/integration/conftest.py b/packages/simcore-sdk/tests/integration/conftest.py index d5f6cd7227a..92b6afaa81b 100644 --- a/packages/simcore-sdk/tests/integration/conftest.py +++ b/packages/simcore-sdk/tests/integration/conftest.py @@ -142,7 +142,7 @@ async def _create(file_path: Path) -> dict[str, Any]: async with ClientSession() as session: async with session.put(url) as resp: resp.raise_for_status() - presigned_links_enveloped = Envelope[FileUploadSchema].parse_obj( + presigned_links_enveloped = Envelope[FileUploadSchema].model_validate( await resp.json() ) assert presigned_links_enveloped.data diff --git a/packages/simcore-sdk/tests/integration/test_node_ports_common_filemanager.py b/packages/simcore-sdk/tests/integration/test_node_ports_common_filemanager.py index 9cd1ce32de4..56f696bb46d 100644 --- a/packages/simcore-sdk/tests/integration/test_node_ports_common_filemanager.py +++ b/packages/simcore-sdk/tests/integration/test_node_ports_common_filemanager.py @@ -1,699 +1,699 @@ -# pylint:disable=unused-variable -# pylint:disable=unused-argument -# pylint:disable=redefined-outer-name -# pylint:disable=too-many-arguments -# pylint:disable=protected-access - -import filecmp -from collections.abc import Awaitable, Callable -from pathlib import Path -from typing import Any -from uuid import uuid4 - -import pytest -from aiohttp import ClientError -from faker import Faker -from models_library.projects_nodes_io import ( - LocationID, - SimcoreS3DirectoryID, - SimcoreS3FileID, -) -from models_library.users import UserID -from pydantic import BaseModel, ByteSize, parse_obj_as -from pytest_mock import MockerFixture -from pytest_simcore.helpers.parametrizations import byte_size_ids -from servicelib.progress_bar import ProgressBarData -from settings_library.aws_s3_cli import AwsS3CliSettings -from settings_library.r_clone import RCloneSettings -from simcore_sdk.node_ports_common import exceptions, filemanager -from simcore_sdk.node_ports_common.aws_s3_cli import AwsS3CliFailedError -from simcore_sdk.node_ports_common.filemanager import UploadedFile, UploadedFolder -from simcore_sdk.node_ports_common.r_clone import RCloneFailedError -from yarl import URL - -pytest_simcore_core_services_selection = [ - "migration", - "postgres", - "storage", - "redis", -] - -pytest_simcore_ops_services_selection = ["minio", "adminer"] - - -class _SyncSettings(BaseModel): - r_clone_settings: RCloneSettings | None - aws_s3_cli_settings: AwsS3CliSettings | None - - -@pytest.fixture( - params=[(True, False), 
(False, True), (False, False)], - ids=[ - "RClone enabled", - "AwsS3Cli enabled", - "Both RClone and AwsS3Cli disabled", - ], -) -def optional_sync_settings( - r_clone_settings: RCloneSettings, - aws_s3_cli_settings: AwsS3CliSettings, - request: pytest.FixtureRequest, -) -> _SyncSettings: - _rclone_enabled, _aws_s3_cli_enabled = request.param - - _r_clone_settings = r_clone_settings if _rclone_enabled else None - _aws_s3_cli_settings = aws_s3_cli_settings if _aws_s3_cli_enabled else None - - return _SyncSettings( - r_clone_settings=_r_clone_settings, aws_s3_cli_settings=_aws_s3_cli_settings - ) - - -def _file_size(size_str: str, **pytest_params): - return pytest.param(parse_obj_as(ByteSize, size_str), id=size_str, **pytest_params) - - -@pytest.mark.parametrize( - "file_size", - [ - _file_size("10Mib"), - _file_size("103Mib"), - _file_size("1003Mib", marks=pytest.mark.heavy_load), - _file_size("7Gib", marks=pytest.mark.heavy_load), - ], - ids=byte_size_ids, -) -async def test_valid_upload_download( - node_ports_config: None, - tmpdir: Path, - user_id: int, - create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], - s3_simcore_location: LocationID, - file_size: ByteSize, - create_file_of_size: Callable[[ByteSize, str], Path], - optional_sync_settings: _SyncSettings, - simcore_services_ready: None, - storage_service: URL, - faker: Faker, -): - file_path = create_file_of_size(file_size, "test.test") - - file_id = create_valid_file_uuid("", file_path) - async with ProgressBarData(num_steps=2, description=faker.pystr()) as progress_bar: - upload_result: UploadedFolder | UploadedFile = await filemanager.upload_path( - user_id=user_id, - store_id=s3_simcore_location, - store_name=None, - s3_object=file_id, - path_to_upload=file_path, - r_clone_settings=optional_sync_settings.r_clone_settings, - io_log_redirect_cb=None, - progress_bar=progress_bar, - aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, - ) - assert isinstance(upload_result, UploadedFile) - store_id, e_tag = upload_result.store_id, upload_result.etag - # pylint: disable=protected-access - assert progress_bar._current_steps == pytest.approx(1) # noqa: SLF001 - assert store_id == s3_simcore_location - assert e_tag - file_metadata = await filemanager.get_file_metadata( - user_id=user_id, store_id=store_id, s3_object=file_id - ) - assert file_metadata.location == store_id - assert file_metadata.etag == e_tag - - download_folder = Path(tmpdir) / "downloads" - download_file_path = await filemanager.download_path_from_s3( - user_id=user_id, - store_id=s3_simcore_location, - store_name=None, - s3_object=file_id, - local_path=download_folder, - io_log_redirect_cb=None, - r_clone_settings=optional_sync_settings.r_clone_settings, - progress_bar=progress_bar, - aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, - ) - assert progress_bar._current_steps == pytest.approx(2) # noqa: SLF001 - assert download_file_path.exists() - assert download_file_path.name == "test.test" - assert filecmp.cmp(download_file_path, file_path) - - -@pytest.mark.parametrize( - "file_size", - [ - _file_size("10Mib"), - _file_size("103Mib"), - ], - ids=byte_size_ids, -) -async def test_valid_upload_download_using_file_object( - node_ports_config: None, - tmpdir: Path, - user_id: int, - create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], - s3_simcore_location: LocationID, - file_size: ByteSize, - create_file_of_size: Callable[[ByteSize, str], Path], - optional_sync_settings: _SyncSettings, - faker: Faker, -): - file_path = 
create_file_of_size(file_size, "test.test") - - file_id = create_valid_file_uuid("", file_path) - with file_path.open("rb") as file_object: - upload_result: UploadedFolder | UploadedFile = await filemanager.upload_path( - user_id=user_id, - store_id=s3_simcore_location, - store_name=None, - s3_object=file_id, - path_to_upload=filemanager.UploadableFileObject( - file_object, file_path.name, file_path.stat().st_size - ), - r_clone_settings=optional_sync_settings.r_clone_settings, - io_log_redirect_cb=None, - aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, - ) - assert isinstance(upload_result, UploadedFile) - store_id, e_tag = upload_result.store_id, upload_result.etag - assert store_id == s3_simcore_location - assert e_tag - file_metadata = await filemanager.get_file_metadata( - user_id=user_id, store_id=store_id, s3_object=file_id - ) - assert file_metadata.location == store_id - assert file_metadata.etag == e_tag - - download_folder = Path(tmpdir) / "downloads" - async with ProgressBarData(num_steps=1, description=faker.pystr()) as progress_bar: - download_file_path = await filemanager.download_path_from_s3( - user_id=user_id, - store_id=s3_simcore_location, - store_name=None, - s3_object=file_id, - local_path=download_folder, - io_log_redirect_cb=None, - r_clone_settings=optional_sync_settings.r_clone_settings, - progress_bar=progress_bar, - aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, - ) - assert progress_bar._current_steps == pytest.approx(1) # noqa: SLF001 - assert download_file_path.exists() - assert download_file_path.name == "test.test" - assert filecmp.cmp(download_file_path, file_path) - - -@pytest.fixture -def mocked_upload_file_raising_exceptions(mocker: MockerFixture) -> None: - mocker.patch( - "simcore_sdk.node_ports_common.filemanager.r_clone.sync_local_to_s3", - autospec=True, - side_effect=RCloneFailedError, - ) - mocker.patch( - "simcore_sdk.node_ports_common.file_io_utils._upload_file_part", - autospec=True, - side_effect=ClientError, - ) - mocker.patch( - "simcore_sdk.node_ports_common.filemanager.aws_s3_cli.sync_local_to_s3", - autospec=True, - side_effect=AwsS3CliFailedError, - ) - - -@pytest.mark.parametrize( - "file_size", - [ - _file_size("10Mib"), - ], - ids=byte_size_ids, -) -async def test_failed_upload_is_properly_removed_from_storage( - node_ports_config: None, - create_file_of_size: Callable[[ByteSize], Path], - create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], - s3_simcore_location: LocationID, - optional_sync_settings: _SyncSettings, - file_size: ByteSize, - user_id: UserID, - mocked_upload_file_raising_exceptions: None, -): - file_path = create_file_of_size(file_size) - file_id = create_valid_file_uuid("", file_path) - with pytest.raises(exceptions.S3TransferError): - await filemanager.upload_path( - user_id=user_id, - store_id=s3_simcore_location, - store_name=None, - s3_object=file_id, - path_to_upload=file_path, - r_clone_settings=optional_sync_settings.r_clone_settings, - io_log_redirect_cb=None, - aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, - ) - with pytest.raises(exceptions.S3InvalidPathError): - await filemanager.get_file_metadata( - user_id=user_id, store_id=s3_simcore_location, s3_object=file_id - ) - - -@pytest.mark.parametrize( - "file_size", - [ - _file_size("10Mib"), - ], - ids=byte_size_ids, -) -async def test_failed_upload_after_valid_upload_keeps_last_valid_state( - node_ports_config: None, - create_file_of_size: Callable[[ByteSize], Path], - create_valid_file_uuid: 
Callable[[str, Path], SimcoreS3FileID], - s3_simcore_location: LocationID, - optional_sync_settings: _SyncSettings, - file_size: ByteSize, - user_id: UserID, - mocker: MockerFixture, -): - # upload a valid file - file_path = create_file_of_size(file_size) - file_id = create_valid_file_uuid("", file_path) - upload_result: UploadedFolder | UploadedFile = await filemanager.upload_path( - user_id=user_id, - store_id=s3_simcore_location, - store_name=None, - s3_object=file_id, - path_to_upload=file_path, - r_clone_settings=optional_sync_settings.r_clone_settings, - io_log_redirect_cb=None, - aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, - ) - assert isinstance(upload_result, UploadedFile) - store_id, e_tag = upload_result.store_id, upload_result.etag - assert store_id == s3_simcore_location - assert e_tag - # check the file is correctly uploaded - file_metadata = await filemanager.get_file_metadata( - user_id=user_id, store_id=store_id, s3_object=file_id - ) - assert file_metadata.location == store_id - assert file_metadata.etag == e_tag - # now start an invalid update by generating an exception while uploading the same file - mocker.patch( - "simcore_sdk.node_ports_common.filemanager.r_clone.sync_local_to_s3", - autospec=True, - side_effect=RCloneFailedError, - ) - mocker.patch( - "simcore_sdk.node_ports_common.file_io_utils._upload_file_part", - autospec=True, - side_effect=ClientError, - ) - with pytest.raises(exceptions.S3TransferError): - await filemanager.upload_path( - user_id=user_id, - store_id=s3_simcore_location, - store_name=None, - s3_object=file_id, - path_to_upload=file_path, - r_clone_settings=optional_sync_settings.r_clone_settings, - io_log_redirect_cb=None, - aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, - ) - # the file shall be back to its original state - file_metadata = await filemanager.get_file_metadata( - user_id=user_id, store_id=s3_simcore_location, s3_object=file_id - ) - assert file_metadata.location == store_id - assert file_metadata.etag == e_tag - - -async def test_invalid_file_path( - node_ports_config: None, - tmpdir: Path, - user_id: int, - create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], - s3_simcore_location: LocationID, - optional_sync_settings: _SyncSettings, - faker: Faker, -): - file_path = Path(tmpdir) / "test.test" - file_path.write_text("I am a test file") - assert file_path.exists() - - file_id = create_valid_file_uuid("", file_path) - store = s3_simcore_location - with pytest.raises(FileNotFoundError): - await filemanager.upload_path( - user_id=user_id, - store_id=store, - store_name=None, - s3_object=file_id, - path_to_upload=Path(tmpdir) / "some other file.txt", - io_log_redirect_cb=None, - ) - - download_folder = Path(tmpdir) / "downloads" - with pytest.raises(exceptions.S3InvalidPathError): # noqa: PT012 - async with ProgressBarData( - num_steps=1, description=faker.pystr() - ) as progress_bar: - await filemanager.download_path_from_s3( - user_id=user_id, - store_id=store, - store_name=None, - s3_object=file_id, - local_path=download_folder, - io_log_redirect_cb=None, - r_clone_settings=optional_sync_settings.r_clone_settings, - progress_bar=progress_bar, - aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, - ) - - -async def test_errors_upon_invalid_file_identifiers( - node_ports_config: None, - tmpdir: Path, - user_id: UserID, - project_id: str, - s3_simcore_location: LocationID, - optional_sync_settings: _SyncSettings, - faker: Faker, -): - file_path = Path(tmpdir) / "test.test" 
- file_path.write_text("I am a test file") - assert file_path.exists() - - store = s3_simcore_location - with pytest.raises(exceptions.S3InvalidPathError): # noqa: PT012 - invalid_s3_path = SimcoreS3FileID("") - await filemanager.upload_path( - user_id=user_id, - store_id=store, - store_name=None, - s3_object=invalid_s3_path, - path_to_upload=file_path, - io_log_redirect_cb=None, - ) - - with pytest.raises(exceptions.StorageInvalidCall): # noqa: PT012 - invalid_file_id = SimcoreS3FileID("file_id") - await filemanager.upload_path( - user_id=user_id, - store_id=store, - store_name=None, - s3_object=invalid_file_id, - path_to_upload=file_path, - io_log_redirect_cb=None, - ) - - download_folder = Path(tmpdir) / "downloads" - with pytest.raises(exceptions.S3InvalidPathError): # noqa: PT012 - async with ProgressBarData( - num_steps=1, description=faker.pystr() - ) as progress_bar: - invalid_s3_path = SimcoreS3FileID("") - await filemanager.download_path_from_s3( - user_id=user_id, - store_id=store, - store_name=None, - s3_object=invalid_s3_path, - local_path=download_folder, - io_log_redirect_cb=None, - r_clone_settings=optional_sync_settings.r_clone_settings, - progress_bar=progress_bar, - aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, - ) - - with pytest.raises(exceptions.S3InvalidPathError): # noqa: PT012 - async with ProgressBarData( - num_steps=1, description=faker.pystr() - ) as progress_bar: - await filemanager.download_path_from_s3( - user_id=user_id, - store_id=store, - store_name=None, - s3_object=SimcoreS3FileID(f"{project_id}/{uuid4()}/invisible.txt"), - local_path=download_folder, - io_log_redirect_cb=None, - r_clone_settings=optional_sync_settings.r_clone_settings, - progress_bar=progress_bar, - aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, - ) - - -async def test_invalid_store( - node_ports_config: None, - tmpdir: Path, - user_id: int, - create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], - optional_sync_settings: _SyncSettings, - faker: Faker, -): - file_path = Path(tmpdir) / "test.test" - file_path.write_text("I am a test file") - assert file_path.exists() - - file_id = create_valid_file_uuid("", file_path) - store = "somefunkystore" - with pytest.raises(exceptions.S3InvalidStore): - await filemanager.upload_path( - user_id=user_id, - store_id=None, - store_name=store, # type: ignore - s3_object=file_id, - path_to_upload=file_path, - io_log_redirect_cb=None, - ) - - download_folder = Path(tmpdir) / "downloads" - with pytest.raises(exceptions.S3InvalidStore): # noqa: PT012 - async with ProgressBarData( - num_steps=1, description=faker.pystr() - ) as progress_bar: - await filemanager.download_path_from_s3( - user_id=user_id, - store_id=None, - store_name=store, # type: ignore - s3_object=file_id, - local_path=download_folder, - io_log_redirect_cb=None, - r_clone_settings=optional_sync_settings.r_clone_settings, - progress_bar=progress_bar, - aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, - ) - - -@pytest.fixture( - params=[True, False], - ids=["with RClone", "with AwsS3Cli"], -) -def sync_settings( - r_clone_settings: RCloneSettings, - aws_s3_cli_settings: AwsS3CliSettings, - request: pytest.FixtureRequest, -) -> _SyncSettings: - is_rclone_enabled = request.param - - return _SyncSettings( - r_clone_settings=r_clone_settings if is_rclone_enabled else None, - aws_s3_cli_settings=aws_s3_cli_settings if not is_rclone_enabled else None, - ) - - -@pytest.mark.parametrize("is_directory", [False, True]) -async def 
test_valid_metadata( - node_ports_config: None, - tmpdir: Path, - user_id: int, - create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], - s3_simcore_location: LocationID, - sync_settings: _SyncSettings, - is_directory: bool, -): - # first we go with a non-existing file - file_path = Path(tmpdir) / "a-subdir" / "test.test" - file_path.parent.mkdir(parents=True, exist_ok=True) - - path_to_upload = file_path.parent if is_directory else file_path - - file_id = create_valid_file_uuid("", path_to_upload) - assert file_path.exists() is False - - is_metadata_present = await filemanager.entry_exists( - user_id=user_id, - store_id=s3_simcore_location, - s3_object=file_id, - is_directory=is_directory, - ) - assert is_metadata_present is False - - # now really create the file and upload it - file_path.write_text("I am a test file") - assert file_path.exists() - - file_id = create_valid_file_uuid("", path_to_upload) - upload_result: UploadedFolder | UploadedFile = await filemanager.upload_path( - user_id=user_id, - store_id=s3_simcore_location, - store_name=None, - s3_object=file_id, - path_to_upload=path_to_upload, - io_log_redirect_cb=None, - r_clone_settings=sync_settings.r_clone_settings, - aws_s3_cli_settings=sync_settings.aws_s3_cli_settings, - ) - if is_directory: - assert isinstance(upload_result, UploadedFolder) - else: - assert isinstance(upload_result, UploadedFile) - assert upload_result.store_id == s3_simcore_location - assert upload_result.etag - - is_metadata_present = await filemanager.entry_exists( - user_id=user_id, - store_id=s3_simcore_location, - s3_object=file_id, - is_directory=is_directory, - ) - - assert is_metadata_present is True - - -@pytest.mark.parametrize( - "fct, extra_kwargs", - [ - (filemanager.entry_exists, {"is_directory": False}), - (filemanager.delete_file, {}), - (filemanager.get_file_metadata, {}), - ], -) -async def test_invalid_call_raises_exception( - node_ports_config: None, - tmpdir: Path, - user_id: int, - create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], - s3_simcore_location: LocationID, - fct: Callable[[int, str, str, Any | None], Awaitable], - extra_kwargs: dict[str, Any], -): - file_path = Path(tmpdir) / "test.test" - file_id = create_valid_file_uuid("", file_path) - assert file_path.exists() is False - - with pytest.raises(exceptions.StorageInvalidCall): - await fct( - user_id=None, store_id=s3_simcore_location, s3_object=file_id, **extra_kwargs # type: ignore - ) - with pytest.raises(exceptions.StorageInvalidCall): - await fct(user_id=user_id, store_id=None, s3_object=file_id, **extra_kwargs) # type: ignore - with pytest.raises(exceptions.StorageInvalidCall): - await fct( - user_id=user_id, store_id=s3_simcore_location, s3_object="bing", **extra_kwargs # type: ignore - ) - - -async def test_delete_file( - node_ports_config: None, - tmpdir: Path, - user_id: int, - create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], - s3_simcore_location: LocationID, - storage_service: URL, -): - file_path = Path(tmpdir) / "test.test" - file_path.write_text("I am a test file") - assert file_path.exists() - - file_id = create_valid_file_uuid("", file_path) - upload_result: UploadedFolder | UploadedFile = await filemanager.upload_path( - user_id=user_id, - store_id=s3_simcore_location, - store_name=None, - s3_object=file_id, - path_to_upload=file_path, - io_log_redirect_cb=None, - ) - assert isinstance(upload_result, UploadedFile) - store_id, e_tag = upload_result.store_id, upload_result.etag - assert store_id == s3_simcore_location - 
assert e_tag - - is_metadata_present = await filemanager.entry_exists( - user_id=user_id, store_id=store_id, s3_object=file_id, is_directory=False - ) - assert is_metadata_present is True - - await filemanager.delete_file( - user_id=user_id, store_id=s3_simcore_location, s3_object=file_id - ) - - # check that it disappeared - assert ( - await filemanager.entry_exists( - user_id=user_id, store_id=store_id, s3_object=file_id, is_directory=False - ) - is False - ) - - -@pytest.mark.parametrize("files_in_folder", [1, 10]) -async def test_upload_path_source_is_a_folder( - node_ports_config: None, - project_id: str, - tmp_path: Path, - faker: Faker, - user_id: int, - s3_simcore_location: LocationID, - files_in_folder: int, - sync_settings: _SyncSettings, -): - source_dir = tmp_path / f"source-{faker.uuid4()}" - source_dir.mkdir(parents=True, exist_ok=True) - - download_dir = tmp_path / f"download-{faker.uuid4()}" - download_dir.mkdir(parents=True, exist_ok=True) - - for i in range(files_in_folder): - (source_dir / f"file-{i}.txt").write_text("1") - - directory_id = SimcoreS3DirectoryID.from_simcore_s3_object( - f"{project_id}/{faker.uuid4()}/some-dir-in-node-root/" - ) - s3_object = SimcoreS3FileID(directory_id) - - upload_result: UploadedFolder | UploadedFile = await filemanager.upload_path( - user_id=user_id, - store_id=s3_simcore_location, - store_name=None, - s3_object=s3_object, - path_to_upload=source_dir, - io_log_redirect_cb=None, - r_clone_settings=sync_settings.r_clone_settings, - aws_s3_cli_settings=sync_settings.aws_s3_cli_settings, - ) - assert isinstance(upload_result, UploadedFolder) - assert source_dir.exists() - - async with ProgressBarData(num_steps=1, description=faker.pystr()) as progress_bar: - await filemanager.download_path_from_s3( - user_id=user_id, - store_name=None, - store_id=s3_simcore_location, - s3_object=s3_object, - local_path=download_dir, - io_log_redirect_cb=None, - r_clone_settings=sync_settings.r_clone_settings, - progress_bar=progress_bar, - aws_s3_cli_settings=sync_settings.aws_s3_cli_settings, - ) - assert download_dir.exists() - - # ensure all files in download and source directory are the same - file_names: set = {f.name for f in source_dir.glob("*")} & { - f.name for f in download_dir.glob("*") - } - for file_name in file_names: - filecmp.cmp(source_dir / file_name, download_dir / file_name, shallow=False) +# pylint:disable=unused-variable +# pylint:disable=unused-argument +# pylint:disable=redefined-outer-name +# pylint:disable=too-many-arguments +# pylint:disable=protected-access + +import filecmp +from collections.abc import Awaitable, Callable +from pathlib import Path +from typing import Any +from uuid import uuid4 + +import pytest +from aiohttp import ClientError +from faker import Faker +from models_library.projects_nodes_io import ( + LocationID, + SimcoreS3DirectoryID, + SimcoreS3FileID, +) +from models_library.users import UserID +from pydantic import BaseModel, ByteSize, parse_obj_as +from pytest_mock import MockerFixture +from pytest_simcore.helpers.parametrizations import byte_size_ids +from servicelib.progress_bar import ProgressBarData +from settings_library.aws_s3_cli import AwsS3CliSettings +from settings_library.r_clone import RCloneSettings +from simcore_sdk.node_ports_common import exceptions, filemanager +from simcore_sdk.node_ports_common.aws_s3_cli import AwsS3CliFailedError +from simcore_sdk.node_ports_common.filemanager import UploadedFile, UploadedFolder +from simcore_sdk.node_ports_common.r_clone import RCloneFailedError 
+from yarl import URL + +pytest_simcore_core_services_selection = [ + "migration", + "postgres", + "storage", + "redis", +] + +pytest_simcore_ops_services_selection = ["minio", "adminer"] + + +class _SyncSettings(BaseModel): + r_clone_settings: RCloneSettings | None + aws_s3_cli_settings: AwsS3CliSettings | None + + +@pytest.fixture( + params=[(True, False), (False, True), (False, False)], + ids=[ + "RClone enabled", + "AwsS3Cli enabled", + "Both RClone and AwsS3Cli disabled", + ], +) +def optional_sync_settings( + r_clone_settings: RCloneSettings, + aws_s3_cli_settings: AwsS3CliSettings, + request: pytest.FixtureRequest, +) -> _SyncSettings: + _rclone_enabled, _aws_s3_cli_enabled = request.param + + _r_clone_settings = r_clone_settings if _rclone_enabled else None + _aws_s3_cli_settings = aws_s3_cli_settings if _aws_s3_cli_enabled else None + + return _SyncSettings( + r_clone_settings=_r_clone_settings, aws_s3_cli_settings=_aws_s3_cli_settings + ) + + +def _file_size(size_str: str, **pytest_params): + return pytest.param(parse_obj_as(ByteSize, size_str), id=size_str, **pytest_params) + + +@pytest.mark.parametrize( + "file_size", + [ + _file_size("10Mib"), + _file_size("103Mib"), + _file_size("1003Mib", marks=pytest.mark.heavy_load), + _file_size("7Gib", marks=pytest.mark.heavy_load), + ], + ids=byte_size_ids, +) +async def test_valid_upload_download( + node_ports_config: None, + tmpdir: Path, + user_id: int, + create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], + s3_simcore_location: LocationID, + file_size: ByteSize, + create_file_of_size: Callable[[ByteSize, str], Path], + optional_sync_settings: _SyncSettings, + simcore_services_ready: None, + storage_service: URL, + faker: Faker, +): + file_path = create_file_of_size(file_size, "test.test") + + file_id = create_valid_file_uuid("", file_path) + async with ProgressBarData(num_steps=2, description=faker.pystr()) as progress_bar: + upload_result: UploadedFolder | UploadedFile = await filemanager.upload_path( + user_id=user_id, + store_id=s3_simcore_location, + store_name=None, + s3_object=file_id, + path_to_upload=file_path, + r_clone_settings=optional_sync_settings.r_clone_settings, + io_log_redirect_cb=None, + progress_bar=progress_bar, + aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, + ) + assert isinstance(upload_result, UploadedFile) + store_id, e_tag = upload_result.store_id, upload_result.etag + # pylint: disable=protected-access + assert progress_bar._current_steps == pytest.approx(1) # noqa: SLF001 + assert store_id == s3_simcore_location + assert e_tag + file_metadata = await filemanager.get_file_metadata( + user_id=user_id, store_id=store_id, s3_object=file_id + ) + assert file_metadata.location == store_id + assert file_metadata.etag == e_tag + + download_folder = Path(tmpdir) / "downloads" + download_file_path = await filemanager.download_path_from_s3( + user_id=user_id, + store_id=s3_simcore_location, + store_name=None, + s3_object=file_id, + local_path=download_folder, + io_log_redirect_cb=None, + r_clone_settings=optional_sync_settings.r_clone_settings, + progress_bar=progress_bar, + aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, + ) + assert progress_bar._current_steps == pytest.approx(2) # noqa: SLF001 + assert download_file_path.exists() + assert download_file_path.name == "test.test" + assert filecmp.cmp(download_file_path, file_path) + + +@pytest.mark.parametrize( + "file_size", + [ + _file_size("10Mib"), + _file_size("103Mib"), + ], + ids=byte_size_ids, +) +async def 
test_valid_upload_download_using_file_object( + node_ports_config: None, + tmpdir: Path, + user_id: int, + create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], + s3_simcore_location: LocationID, + file_size: ByteSize, + create_file_of_size: Callable[[ByteSize, str], Path], + optional_sync_settings: _SyncSettings, + faker: Faker, +): + file_path = create_file_of_size(file_size, "test.test") + + file_id = create_valid_file_uuid("", file_path) + with file_path.open("rb") as file_object: + upload_result: UploadedFolder | UploadedFile = await filemanager.upload_path( + user_id=user_id, + store_id=s3_simcore_location, + store_name=None, + s3_object=file_id, + path_to_upload=filemanager.UploadableFileObject( + file_object, file_path.name, file_path.stat().st_size + ), + r_clone_settings=optional_sync_settings.r_clone_settings, + io_log_redirect_cb=None, + aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, + ) + assert isinstance(upload_result, UploadedFile) + store_id, e_tag = upload_result.store_id, upload_result.etag + assert store_id == s3_simcore_location + assert e_tag + file_metadata = await filemanager.get_file_metadata( + user_id=user_id, store_id=store_id, s3_object=file_id + ) + assert file_metadata.location == store_id + assert file_metadata.etag == e_tag + + download_folder = Path(tmpdir) / "downloads" + async with ProgressBarData(num_steps=1, description=faker.pystr()) as progress_bar: + download_file_path = await filemanager.download_path_from_s3( + user_id=user_id, + store_id=s3_simcore_location, + store_name=None, + s3_object=file_id, + local_path=download_folder, + io_log_redirect_cb=None, + r_clone_settings=optional_sync_settings.r_clone_settings, + progress_bar=progress_bar, + aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, + ) + assert progress_bar._current_steps == pytest.approx(1) # noqa: SLF001 + assert download_file_path.exists() + assert download_file_path.name == "test.test" + assert filecmp.cmp(download_file_path, file_path) + + +@pytest.fixture +def mocked_upload_file_raising_exceptions(mocker: MockerFixture) -> None: + mocker.patch( + "simcore_sdk.node_ports_common.filemanager.r_clone.sync_local_to_s3", + autospec=True, + side_effect=RCloneFailedError, + ) + mocker.patch( + "simcore_sdk.node_ports_common.file_io_utils._upload_file_part", + autospec=True, + side_effect=ClientError, + ) + mocker.patch( + "simcore_sdk.node_ports_common.filemanager.aws_s3_cli.sync_local_to_s3", + autospec=True, + side_effect=AwsS3CliFailedError, + ) + + +@pytest.mark.parametrize( + "file_size", + [ + _file_size("10Mib"), + ], + ids=byte_size_ids, +) +async def test_failed_upload_is_properly_removed_from_storage( + node_ports_config: None, + create_file_of_size: Callable[[ByteSize], Path], + create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], + s3_simcore_location: LocationID, + optional_sync_settings: _SyncSettings, + file_size: ByteSize, + user_id: UserID, + mocked_upload_file_raising_exceptions: None, +): + file_path = create_file_of_size(file_size) + file_id = create_valid_file_uuid("", file_path) + with pytest.raises(exceptions.S3TransferError): + await filemanager.upload_path( + user_id=user_id, + store_id=s3_simcore_location, + store_name=None, + s3_object=file_id, + path_to_upload=file_path, + r_clone_settings=optional_sync_settings.r_clone_settings, + io_log_redirect_cb=None, + aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, + ) + with pytest.raises(exceptions.S3InvalidPathError): + await 
filemanager.get_file_metadata( + user_id=user_id, store_id=s3_simcore_location, s3_object=file_id + ) + + +@pytest.mark.parametrize( + "file_size", + [ + _file_size("10Mib"), + ], + ids=byte_size_ids, +) +async def test_failed_upload_after_valid_upload_keeps_last_valid_state( + node_ports_config: None, + create_file_of_size: Callable[[ByteSize], Path], + create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], + s3_simcore_location: LocationID, + optional_sync_settings: _SyncSettings, + file_size: ByteSize, + user_id: UserID, + mocker: MockerFixture, +): + # upload a valid file + file_path = create_file_of_size(file_size) + file_id = create_valid_file_uuid("", file_path) + upload_result: UploadedFolder | UploadedFile = await filemanager.upload_path( + user_id=user_id, + store_id=s3_simcore_location, + store_name=None, + s3_object=file_id, + path_to_upload=file_path, + r_clone_settings=optional_sync_settings.r_clone_settings, + io_log_redirect_cb=None, + aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, + ) + assert isinstance(upload_result, UploadedFile) + store_id, e_tag = upload_result.store_id, upload_result.etag + assert store_id == s3_simcore_location + assert e_tag + # check the file is correctly uploaded + file_metadata = await filemanager.get_file_metadata( + user_id=user_id, store_id=store_id, s3_object=file_id + ) + assert file_metadata.location == store_id + assert file_metadata.etag == e_tag + # now start an invalid update by generating an exception while uploading the same file + mocker.patch( + "simcore_sdk.node_ports_common.filemanager.r_clone.sync_local_to_s3", + autospec=True, + side_effect=RCloneFailedError, + ) + mocker.patch( + "simcore_sdk.node_ports_common.file_io_utils._upload_file_part", + autospec=True, + side_effect=ClientError, + ) + with pytest.raises(exceptions.S3TransferError): + await filemanager.upload_path( + user_id=user_id, + store_id=s3_simcore_location, + store_name=None, + s3_object=file_id, + path_to_upload=file_path, + r_clone_settings=optional_sync_settings.r_clone_settings, + io_log_redirect_cb=None, + aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, + ) + # the file shall be back to its original state + file_metadata = await filemanager.get_file_metadata( + user_id=user_id, store_id=s3_simcore_location, s3_object=file_id + ) + assert file_metadata.location == store_id + assert file_metadata.etag == e_tag + + +async def test_invalid_file_path( + node_ports_config: None, + tmpdir: Path, + user_id: int, + create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], + s3_simcore_location: LocationID, + optional_sync_settings: _SyncSettings, + faker: Faker, +): + file_path = Path(tmpdir) / "test.test" + file_path.write_text("I am a test file") + assert file_path.exists() + + file_id = create_valid_file_uuid("", file_path) + store = s3_simcore_location + with pytest.raises(FileNotFoundError): + await filemanager.upload_path( + user_id=user_id, + store_id=store, + store_name=None, + s3_object=file_id, + path_to_upload=Path(tmpdir) / "some other file.txt", + io_log_redirect_cb=None, + ) + + download_folder = Path(tmpdir) / "downloads" + with pytest.raises(exceptions.S3InvalidPathError): # noqa: PT012 + async with ProgressBarData( + num_steps=1, description=faker.pystr() + ) as progress_bar: + await filemanager.download_path_from_s3( + user_id=user_id, + store_id=store, + store_name=None, + s3_object=file_id, + local_path=download_folder, + io_log_redirect_cb=None, + 
r_clone_settings=optional_sync_settings.r_clone_settings, + progress_bar=progress_bar, + aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, + ) + + +async def test_errors_upon_invalid_file_identifiers( + node_ports_config: None, + tmpdir: Path, + user_id: UserID, + project_id: str, + s3_simcore_location: LocationID, + optional_sync_settings: _SyncSettings, + faker: Faker, +): + file_path = Path(tmpdir) / "test.test" + file_path.write_text("I am a test file") + assert file_path.exists() + + store = s3_simcore_location + with pytest.raises(exceptions.S3InvalidPathError): # noqa: PT012 + invalid_s3_path = SimcoreS3FileID("") + await filemanager.upload_path( + user_id=user_id, + store_id=store, + store_name=None, + s3_object=invalid_s3_path, + path_to_upload=file_path, + io_log_redirect_cb=None, + ) + + with pytest.raises(exceptions.StorageInvalidCall): # noqa: PT012 + invalid_file_id = SimcoreS3FileID("file_id") + await filemanager.upload_path( + user_id=user_id, + store_id=store, + store_name=None, + s3_object=invalid_file_id, + path_to_upload=file_path, + io_log_redirect_cb=None, + ) + + download_folder = Path(tmpdir) / "downloads" + with pytest.raises(exceptions.S3InvalidPathError): # noqa: PT012 + async with ProgressBarData( + num_steps=1, description=faker.pystr() + ) as progress_bar: + invalid_s3_path = SimcoreS3FileID("") + await filemanager.download_path_from_s3( + user_id=user_id, + store_id=store, + store_name=None, + s3_object=invalid_s3_path, + local_path=download_folder, + io_log_redirect_cb=None, + r_clone_settings=optional_sync_settings.r_clone_settings, + progress_bar=progress_bar, + aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, + ) + + with pytest.raises(exceptions.S3InvalidPathError): # noqa: PT012 + async with ProgressBarData( + num_steps=1, description=faker.pystr() + ) as progress_bar: + await filemanager.download_path_from_s3( + user_id=user_id, + store_id=store, + store_name=None, + s3_object=SimcoreS3FileID(f"{project_id}/{uuid4()}/invisible.txt"), + local_path=download_folder, + io_log_redirect_cb=None, + r_clone_settings=optional_sync_settings.r_clone_settings, + progress_bar=progress_bar, + aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, + ) + + +async def test_invalid_store( + node_ports_config: None, + tmpdir: Path, + user_id: int, + create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], + optional_sync_settings: _SyncSettings, + faker: Faker, +): + file_path = Path(tmpdir) / "test.test" + file_path.write_text("I am a test file") + assert file_path.exists() + + file_id = create_valid_file_uuid("", file_path) + store = "somefunkystore" + with pytest.raises(exceptions.S3InvalidStore): + await filemanager.upload_path( + user_id=user_id, + store_id=None, + store_name=store, # type: ignore + s3_object=file_id, + path_to_upload=file_path, + io_log_redirect_cb=None, + ) + + download_folder = Path(tmpdir) / "downloads" + with pytest.raises(exceptions.S3InvalidStore): # noqa: PT012 + async with ProgressBarData( + num_steps=1, description=faker.pystr() + ) as progress_bar: + await filemanager.download_path_from_s3( + user_id=user_id, + store_id=None, + store_name=store, # type: ignore + s3_object=file_id, + local_path=download_folder, + io_log_redirect_cb=None, + r_clone_settings=optional_sync_settings.r_clone_settings, + progress_bar=progress_bar, + aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, + ) + + +@pytest.fixture( + params=[True, False], + ids=["with RClone", "with AwsS3Cli"], +) +def 
sync_settings( + r_clone_settings: RCloneSettings, + aws_s3_cli_settings: AwsS3CliSettings, + request: pytest.FixtureRequest, +) -> _SyncSettings: + is_rclone_enabled = request.param + + return _SyncSettings( + r_clone_settings=r_clone_settings if is_rclone_enabled else None, + aws_s3_cli_settings=aws_s3_cli_settings if not is_rclone_enabled else None, + ) + + +@pytest.mark.parametrize("is_directory", [False, True]) +async def test_valid_metadata( + node_ports_config: None, + tmpdir: Path, + user_id: int, + create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], + s3_simcore_location: LocationID, + sync_settings: _SyncSettings, + is_directory: bool, +): + # first we go with a non-existing file + file_path = Path(tmpdir) / "a-subdir" / "test.test" + file_path.parent.mkdir(parents=True, exist_ok=True) + + path_to_upload = file_path.parent if is_directory else file_path + + file_id = create_valid_file_uuid("", path_to_upload) + assert file_path.exists() is False + + is_metadata_present = await filemanager.entry_exists( + user_id=user_id, + store_id=s3_simcore_location, + s3_object=file_id, + is_directory=is_directory, + ) + assert is_metadata_present is False + + # now really create the file and upload it + file_path.write_text("I am a test file") + assert file_path.exists() + + file_id = create_valid_file_uuid("", path_to_upload) + upload_result: UploadedFolder | UploadedFile = await filemanager.upload_path( + user_id=user_id, + store_id=s3_simcore_location, + store_name=None, + s3_object=file_id, + path_to_upload=path_to_upload, + io_log_redirect_cb=None, + r_clone_settings=sync_settings.r_clone_settings, + aws_s3_cli_settings=sync_settings.aws_s3_cli_settings, + ) + if is_directory: + assert isinstance(upload_result, UploadedFolder) + else: + assert isinstance(upload_result, UploadedFile) + assert upload_result.store_id == s3_simcore_location + assert upload_result.etag + + is_metadata_present = await filemanager.entry_exists( + user_id=user_id, + store_id=s3_simcore_location, + s3_object=file_id, + is_directory=is_directory, + ) + + assert is_metadata_present is True + + +@pytest.mark.parametrize( + "fct, extra_kwargs", + [ + (filemanager.entry_exists, {"is_directory": False}), + (filemanager.delete_file, {}), + (filemanager.get_file_metadata, {}), + ], +) +async def test_invalid_call_raises_exception( + node_ports_config: None, + tmpdir: Path, + user_id: int, + create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], + s3_simcore_location: LocationID, + fct: Callable[[int, str, str, Any | None], Awaitable], + extra_kwargs: dict[str, Any], +): + file_path = Path(tmpdir) / "test.test" + file_id = create_valid_file_uuid("", file_path) + assert file_path.exists() is False + + with pytest.raises(exceptions.StorageInvalidCall): + await fct( + user_id=None, store_id=s3_simcore_location, s3_object=file_id, **extra_kwargs # type: ignore + ) + with pytest.raises(exceptions.StorageInvalidCall): + await fct(user_id=user_id, store_id=None, s3_object=file_id, **extra_kwargs) # type: ignore + with pytest.raises(exceptions.StorageInvalidCall): + await fct( + user_id=user_id, store_id=s3_simcore_location, s3_object="bing", **extra_kwargs # type: ignore + ) + + +async def test_delete_file( + node_ports_config: None, + tmpdir: Path, + user_id: int, + create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], + s3_simcore_location: LocationID, + storage_service: URL, +): + file_path = Path(tmpdir) / "test.test" + file_path.write_text("I am a test file") + assert file_path.exists() + + 
file_id = create_valid_file_uuid("", file_path) + upload_result: UploadedFolder | UploadedFile = await filemanager.upload_path( + user_id=user_id, + store_id=s3_simcore_location, + store_name=None, + s3_object=file_id, + path_to_upload=file_path, + io_log_redirect_cb=None, + ) + assert isinstance(upload_result, UploadedFile) + store_id, e_tag = upload_result.store_id, upload_result.etag + assert store_id == s3_simcore_location + assert e_tag + + is_metadata_present = await filemanager.entry_exists( + user_id=user_id, store_id=store_id, s3_object=file_id, is_directory=False + ) + assert is_metadata_present is True + + await filemanager.delete_file( + user_id=user_id, store_id=s3_simcore_location, s3_object=file_id + ) + + # check that it disappeared + assert ( + await filemanager.entry_exists( + user_id=user_id, store_id=store_id, s3_object=file_id, is_directory=False + ) + is False + ) + + +@pytest.mark.parametrize("files_in_folder", [1, 10]) +async def test_upload_path_source_is_a_folder( + node_ports_config: None, + project_id: str, + tmp_path: Path, + faker: Faker, + user_id: int, + s3_simcore_location: LocationID, + files_in_folder: int, + sync_settings: _SyncSettings, +): + source_dir = tmp_path / f"source-{faker.uuid4()}" + source_dir.mkdir(parents=True, exist_ok=True) + + download_dir = tmp_path / f"download-{faker.uuid4()}" + download_dir.mkdir(parents=True, exist_ok=True) + + for i in range(files_in_folder): + (source_dir / f"file-{i}.txt").write_text("1") + + directory_id = SimcoreS3DirectoryID.from_simcore_s3_object( + f"{project_id}/{faker.uuid4()}/some-dir-in-node-root/" + ) + s3_object = SimcoreS3FileID(directory_id) + + upload_result: UploadedFolder | UploadedFile = await filemanager.upload_path( + user_id=user_id, + store_id=s3_simcore_location, + store_name=None, + s3_object=s3_object, + path_to_upload=source_dir, + io_log_redirect_cb=None, + r_clone_settings=sync_settings.r_clone_settings, + aws_s3_cli_settings=sync_settings.aws_s3_cli_settings, + ) + assert isinstance(upload_result, UploadedFolder) + assert source_dir.exists() + + async with ProgressBarData(num_steps=1, description=faker.pystr()) as progress_bar: + await filemanager.download_path_from_s3( + user_id=user_id, + store_name=None, + store_id=s3_simcore_location, + s3_object=s3_object, + local_path=download_dir, + io_log_redirect_cb=None, + r_clone_settings=sync_settings.r_clone_settings, + progress_bar=progress_bar, + aws_s3_cli_settings=sync_settings.aws_s3_cli_settings, + ) + assert download_dir.exists() + + # ensure all files in download and source directory are the same + file_names: set = {f.name for f in source_dir.glob("*")} & { + f.name for f in download_dir.glob("*") + } + for file_name in file_names: + filecmp.cmp(source_dir / file_name, download_dir / file_name, shallow=False) diff --git a/packages/simcore-sdk/tests/integration/test_node_ports_v2_nodeports2.py b/packages/simcore-sdk/tests/integration/test_node_ports_v2_nodeports2.py index a9016609d13..ec24f271394 100644 --- a/packages/simcore-sdk/tests/integration/test_node_ports_v2_nodeports2.py +++ b/packages/simcore-sdk/tests/integration/test_node_ports_v2_nodeports2.py @@ -28,6 +28,7 @@ SimcoreS3FileID, ) from models_library.services_types import ServicePortKey +from pydantic import TypeAdapter from servicelib.progress_bar import ProgressBarData from settings_library.r_clone import RCloneSettings from simcore_sdk import node_ports_v2 @@ -90,7 +91,7 @@ async def _check_port_valid( assert port.value assert isinstance(port.value, DownloadLink | 
PortLink | BaseFileLink) assert ( - port.value.dict(by_alias=True, exclude_unset=True) + port.value.model_dump(by_alias=True, exclude_unset=True) == port_values[key_name] ) else: @@ -224,7 +225,7 @@ async def test_port_value_accessors( item_pytype: type, option_r_clone_settings: RCloneSettings | None, ): # pylint: disable=W0613, W0621 - item_key = ServicePortKey("some_key") + item_key = TypeAdapter(ServicePortKey).validate_python("some_key") config_dict, _, _ = create_special_configuration( inputs=[(item_key, item_type, item_value)], outputs=[(item_key, item_type, None)], @@ -299,17 +300,26 @@ async def test_port_file_accessors( ) await check_config_valid(PORTS, config_dict) assert ( - await (await PORTS.outputs)[ServicePortKey("out_34")].get() is None + await (await PORTS.outputs)[ + TypeAdapter(ServicePortKey).validate_python("out_34") + ].get() + is None ) # check emptyness with pytest.raises(exceptions.S3InvalidPathError): - await (await PORTS.inputs)[ServicePortKey("in_1")].get() + await (await PORTS.inputs)[ + TypeAdapter(ServicePortKey).validate_python("in_1") + ].get() # this triggers an upload to S3 + configuration change - await (await PORTS.outputs)[ServicePortKey("out_34")].set(item_value) + await (await PORTS.outputs)[ + TypeAdapter(ServicePortKey).validate_python("out_34") + ].set(item_value) # this is the link to S3 storage - value = (await PORTS.outputs)[ServicePortKey("out_34")].value + value = (await PORTS.outputs)[ + TypeAdapter(ServicePortKey).validate_python("out_34") + ].value assert isinstance(value, DownloadLink | PortLink | BaseFileLink) - received_file_link = value.dict(by_alias=True, exclude_unset=True) + received_file_link = value.model_dump(by_alias=True, exclude_unset=True) assert received_file_link["store"] == s3_simcore_location assert ( received_file_link["path"] @@ -322,12 +332,21 @@ async def test_port_file_accessors( # this triggers a download from S3 to a location in /tempdir/simcorefiles/item_key assert isinstance( - await (await PORTS.outputs)[ServicePortKey("out_34")].get(), item_pytype + await (await PORTS.outputs)[ + TypeAdapter(ServicePortKey).validate_python("out_34") + ].get(), + item_pytype, ) - downloaded_file = await (await PORTS.outputs)[ServicePortKey("out_34")].get() + downloaded_file = await (await PORTS.outputs)[ + TypeAdapter(ServicePortKey).validate_python("out_34") + ].get() assert isinstance(downloaded_file, Path) assert downloaded_file.exists() - assert str(await (await PORTS.outputs)[ServicePortKey("out_34")].get()).startswith( + assert str( + await (await PORTS.outputs)[ + TypeAdapter(ServicePortKey).validate_python("out_34") + ].get() + ).startswith( str( Path( tempfile.gettempdir(), @@ -472,9 +491,16 @@ async def test_get_value_from_previous_node( ) await check_config_valid(PORTS, config_dict) - input_value = await (await PORTS.inputs)[ServicePortKey("in_15")].get() + input_value = await (await PORTS.inputs)[ + TypeAdapter(ServicePortKey).validate_python("in_15") + ].get() assert isinstance(input_value, item_pytype) - assert await (await PORTS.inputs)[ServicePortKey("in_15")].get() == item_value + assert ( + await (await PORTS.inputs)[ + TypeAdapter(ServicePortKey).validate_python("in_15") + ].get() + == item_value + ) @pytest.mark.parametrize( @@ -516,7 +542,9 @@ async def test_get_file_from_previous_node( r_clone_settings=option_r_clone_settings, ) await check_config_valid(PORTS, config_dict) - file_path = await (await PORTS.inputs)[ServicePortKey("in_15")].get() + file_path = await (await PORTS.inputs)[ + 
@@ -516,7 +542,9 @@ async def test_get_file_from_previous_node(
         r_clone_settings=option_r_clone_settings,
     )
     await check_config_valid(PORTS, config_dict)
-    file_path = await (await PORTS.inputs)[ServicePortKey("in_15")].get()
+    file_path = await (await PORTS.inputs)[
+        TypeAdapter(ServicePortKey).validate_python("in_15")
+    ].get()
     assert isinstance(file_path, item_pytype)
     assert file_path == Path(
         tempfile.gettempdir(),
@@ -577,7 +605,9 @@ async def test_get_file_from_previous_node_with_mapping_of_same_key_name(
         postgres_db, project_id, this_node_uuid, config_dict
     )  # pylint: disable=E1101
     await check_config_valid(PORTS, config_dict)
-    file_path = await (await PORTS.inputs)[ServicePortKey("in_15")].get()
+    file_path = await (await PORTS.inputs)[
+        TypeAdapter(ServicePortKey).validate_python("in_15")
+    ].get()
     assert isinstance(file_path, item_pytype)
     assert file_path == Path(
         tempfile.gettempdir(),
@@ -637,7 +667,9 @@
         postgres_db, project_id, node_uuid, config_dict
     )  # pylint: disable=E1101
     await check_config_valid(PORTS, config_dict)
-    file_path = await (await PORTS.inputs)[ServicePortKey("in_1")].get()
+    file_path = await (await PORTS.inputs)[
+        TypeAdapter(ServicePortKey).validate_python("in_1")
+    ].get()
     assert isinstance(file_path, item_pytype)
     assert file_path == Path(
         tempfile.gettempdir(),
@@ -648,7 +680,9 @@ async def test_file_mapping(
     )
     # let's get it a second time to see if replacing works
-    file_path = await (await PORTS.inputs)[ServicePortKey("in_1")].get()
+    file_path = await (await PORTS.inputs)[
+        TypeAdapter(ServicePortKey).validate_python("in_1")
+    ].get()
     assert isinstance(file_path, item_pytype)
     assert file_path == Path(
         tempfile.gettempdir(),
@@ -665,9 +699,11 @@ async def test_file_mapping(
     assert isinstance(file_path, Path)
     await PORTS.set_file_by_keymap(file_path)
     file_id = create_valid_file_uuid("out_1", file_path)
-    value = (await PORTS.outputs)[ServicePortKey("out_1")].value
+    value = (await PORTS.outputs)[
+        TypeAdapter(ServicePortKey).validate_python("out_1")
+    ].value
     assert isinstance(value, DownloadLink | PortLink | BaseFileLink)
-    received_file_link = value.dict(by_alias=True, exclude_unset=True)
+    received_file_link = value.model_dump(by_alias=True, exclude_unset=True)
     assert received_file_link["store"] == s3_simcore_location
     assert received_file_link["path"] == file_id
     # received a new eTag
@@ -720,15 +756,19 @@ async def test_regression_concurrent_port_update_fails(
     # when writing in serial these are expected to work
     for item_key, _, _ in outputs:
-        await (await PORTS.outputs)[ServicePortKey(item_key)].set(int_item_value)
-        assert (await PORTS.outputs)[ServicePortKey(item_key)].value == int_item_value
+        await (await PORTS.outputs)[
+            TypeAdapter(ServicePortKey).validate_python(item_key)
+        ].set(int_item_value)
+        assert (await PORTS.outputs)[
+            TypeAdapter(ServicePortKey).validate_python(item_key)
+        ].value == int_item_value

     # when writing in parallel and reading back,
     # they fail, with enough concurrency
     async def _upload_create_task(item_key: str) -> None:
-        await (await PORTS.outputs)[ServicePortKey(item_key)].set(
-            parallel_int_item_value
-        )
+        await (await PORTS.outputs)[
+            TypeAdapter(ServicePortKey).validate_python(item_key)
+        ].set(parallel_int_item_value)

     # updating in parallel creates a race condition
     results = await gather(
@@ -741,7 +781,7 @@ async def _upload_create_task(item_key: str) -> None:
     with pytest.raises(AssertionError) as exc_info:  # noqa: PT012
         for item_key, _, _ in outputs:
             assert (await PORTS.outputs)[
-                ServicePortKey(item_key)
+                TypeAdapter(ServicePortKey).validate_python(item_key)
             ].value == parallel_int_item_value

     assert exc_info.value.args[0].startswith(
@@ -773,7 +813,7 @@ async def test_batch_update_inputs_outputs(
     async with ProgressBarData(num_steps=2, description=faker.pystr()) as progress_bar:
         await PORTS.set_multiple(
             {
-                ServicePortKey(port.key): (k, None)
+                TypeAdapter(ServicePortKey).validate_python(port.key): (k, None)
                 for k, port in enumerate((await PORTS.outputs).values())
             },
             progress_bar=progress_bar,
@@ -782,7 +822,7 @@ async def test_batch_update_inputs_outputs(
         assert progress_bar._current_steps == pytest.approx(1)  # noqa: SLF001
         await PORTS.set_multiple(
             {
-                ServicePortKey(port.key): (k, None)
+                TypeAdapter(ServicePortKey).validate_python(port.key): (k, None)
                 for k, port in enumerate((await PORTS.inputs).values(), start=1000)
             },
             progress_bar=progress_bar,
@@ -793,18 +833,38 @@ async def test_batch_update_inputs_outputs(
     ports_inputs = await PORTS.inputs
     for k, asd in enumerate(outputs):
         item_key, _, _ = asd
-        assert ports_outputs[ServicePortKey(item_key)].value == k
-        assert await ports_outputs[ServicePortKey(item_key)].get() == k
+        assert (
+            ports_outputs[TypeAdapter(ServicePortKey).validate_python(item_key)].value
+            == k
+        )
+        assert (
+            await ports_outputs[
+                TypeAdapter(ServicePortKey).validate_python(item_key)
+            ].get()
+            == k
+        )
     for k, asd in enumerate(inputs, start=1000):
         item_key, _, _ = asd
-        assert ports_inputs[ServicePortKey(item_key)].value == k
-        assert await ports_inputs[ServicePortKey(item_key)].get() == k
+        assert (
+            ports_inputs[TypeAdapter(ServicePortKey).validate_python(item_key)].value
+            == k
+        )
+        assert (
+            await ports_inputs[
+                TypeAdapter(ServicePortKey).validate_python(item_key)
+            ].get()
+            == k
+        )

     # test missing key raises error
     async with ProgressBarData(num_steps=1, description=faker.pystr()) as progress_bar:
         with pytest.raises(UnboundPortError):
             await PORTS.set_multiple(
-                {ServicePortKey("missing_key_in_both"): (123132, None)},
+                {
+                    TypeAdapter(ServicePortKey).validate_python(
+                        "missing_key_in_both"
+                    ): (123132, None)
+                },
                 progress_bar=progress_bar,
             )
diff --git a/packages/simcore-sdk/tests/unit/test_node_data_data_manager.py b/packages/simcore-sdk/tests/unit/test_node_data_data_manager.py
index c1edb4f183c..a578d410605 100644
--- a/packages/simcore-sdk/tests/unit/test_node_data_data_manager.py
+++ b/packages/simcore-sdk/tests/unit/test_node_data_data_manager.py
@@ -40,7 +40,7 @@ def _create_files(number: int, folder: Path) -> list[Path]:

 @pytest.fixture
 def r_clone_settings(faker: Faker) -> RCloneSettings:
-    return RCloneSettings.parse_obj(
+    return RCloneSettings.model_validate(
         {
             "R_CLONE_S3": {
                 "S3_ENDPOINT": faker.url(),
diff --git a/packages/simcore-sdk/tests/unit/test_node_ports_common_file_io_utils.py b/packages/simcore-sdk/tests/unit/test_node_ports_common_file_io_utils.py
index 2d32d345ffa..0540daa58d1 100644
--- a/packages/simcore-sdk/tests/unit/test_node_ports_common_file_io_utils.py
+++ b/packages/simcore-sdk/tests/unit/test_node_ports_common_file_io_utils.py
@@ -19,7 +19,7 @@
     UploadedPart,
 )
 from moto.server import ThreadedMotoServer
-from pydantic import AnyUrl, ByteSize, parse_obj_as
+from pydantic import AnyUrl, ByteSize, TypeAdapter
 from pytest_mock import MockerFixture
 from servicelib.aiohttp import status
 from servicelib.progress_bar import ProgressBarData
@@ -212,8 +212,7 @@ async def _creator(num_upload_links: int, chunk_size: ByteSize) -> FileUploadSch
         assert "UploadId" in response
         upload_id = response["UploadId"]

-        upload_links = parse_obj_as(
-            list[AnyUrl],
+        upload_links = TypeAdapter(list[AnyUrl]).validate_python(
             await asyncio.gather(
                 *[
                     aiobotocore_s3_client.generate_presigned_url(
@@ -234,8 +233,8 @@ async def _creator(num_upload_links: int, chunk_size: ByteSize) -> FileUploadSch
             chunk_size=chunk_size,
             urls=upload_links,
             links=FileUploadLinks(
-                abort_upload=parse_obj_as(AnyUrl, faker.uri()),
-                complete_upload=parse_obj_as(AnyUrl, faker.uri()),
+                abort_upload=TypeAdapter(AnyUrl).validate_python(faker.uri()),
+                complete_upload=TypeAdapter(AnyUrl).validate_python(faker.uri()),
             ),
         )

@@ -245,7 +244,12 @@ async def _creator(num_upload_links: int, chunk_size: ByteSize) -> FileUploadSch
 @pytest.mark.skip(reason="this will allow to reproduce an issue")
 @pytest.mark.parametrize(
     "file_size,used_chunk_size",
-    [(parse_obj_as(ByteSize, 21800510238), parse_obj_as(ByteSize, 10485760))],
+    [
+        (
+            TypeAdapter(ByteSize).validate_python(21800510238),
+            TypeAdapter(ByteSize).validate_python(10485760),
+        )
+    ],
 )
 async def test_upload_file_to_presigned_links(
     client_session: ClientSession,
@@ -253,6 +257,7 @@ async def test_upload_file_to_presigned_links(
     create_file_of_size: Callable[[ByteSize], Path],
     file_size: ByteSize,
     used_chunk_size: ByteSize,
+    faker: Faker,
 ):
     """This test is here to reproduce the issue https://github.com/ITISFoundation/osparc-simcore/issues/3531
     One theory is that something might be wrong in how the chunking is done and that AWS times out
@@ -267,11 +272,11 @@ async def test_upload_file_to_presigned_links(
     """
     local_file = create_file_of_size(file_size)
     num_links = 2080
-    effective_chunk_size = parse_obj_as(ByteSize, local_file.stat().st_size / num_links)
+    effective_chunk_size = TypeAdapter(ByteSize).validate_python(int(local_file.stat().st_size / num_links))
     assert effective_chunk_size <= used_chunk_size
     upload_links = await create_upload_links(num_links, used_chunk_size)
     assert len(upload_links.urls) == num_links
-    async with ProgressBarData(num_steps=1) as progress_bar:
+    async with ProgressBarData(num_steps=1, description=faker.pystr()) as progress_bar:
         uploaded_parts: list[UploadedPart] = await upload_file_to_presigned_links(
             session=client_session,
             file_upload_links=upload_links,
diff --git a/packages/simcore-sdk/tests/unit/test_node_ports_v2_links.py b/packages/simcore-sdk/tests/unit/test_node_ports_v2_links.py
index 5116311ae01..95b114ae563 100644
--- a/packages/simcore-sdk/tests/unit/test_node_ports_v2_links.py
+++ b/packages/simcore-sdk/tests/unit/test_node_ports_v2_links.py
@@ -2,7 +2,7 @@
 from uuid import uuid4

 import pytest
-from pydantic import ValidationError
+from pydantic import TypeAdapter, ValidationError
 from simcore_sdk.node_ports_v2.links import DownloadLink, FileLink, PortLink
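The replacement of the free function parse_obj_as with TypeAdapter, as applied throughout the hunks above, works for arbitrary type expressions, including containers. A short sketch of the equivalence these edits assume (example values only):

    from pydantic import AnyUrl, ByteSize, TypeAdapter

    # v1: parse_obj_as(ByteSize, 10485760)
    chunk = TypeAdapter(ByteSize).validate_python(10485760)

    # v1: parse_obj_as(list[AnyUrl], [...]); any type expression is accepted
    urls = TypeAdapter(list[AnyUrl]).validate_python(["http://a.example", "http://b.example"])
    assert str(urls[0]) == "http://a.example/"  # v2 URLs normalize with a trailing slash

Note that v2's AnyUrl is no longer a str subclass and normalizes a bare authority with a trailing slash, which is why the storage-client tests later in this diff compare through str(...).rstrip("/").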
diff --git a/packages/simcore-sdk/tests/unit/test_node_ports_v2_nodeports_v2.py b/packages/simcore-sdk/tests/unit/test_node_ports_v2_nodeports_v2.py
index 91609476b9c..856b4b268b1 100644
--- a/packages/simcore-sdk/tests/unit/test_node_ports_v2_nodeports_v2.py
+++ b/packages/simcore-sdk/tests/unit/test_node_ports_v2_nodeports_v2.py
@@ -3,9 +3,11 @@
 # pylint:disable=redefined-outer-name
 # pylint:disable=protected-access

+import asyncio
 from pathlib import Path
 from typing import Any, Callable

+from pydantic import ValidationError
 import pytest
 from faker import Faker
 from pytest_mock import MockFixture
@@ -50,6 +52,7 @@ async def mock_node_port_creator_cb(*args, **kwargs):
         user_id=user_id,
         project_id=project_id,
         node_uuid=node_uuid,
+        io_log_redirect_cb=None,
         save_to_db_cb=mock_save_db_cb,
         node_port_creator_cb=mock_node_port_creator_cb,
         auto_update=False,
@@ -62,6 +65,7 @@ async def mock_node_port_creator_cb(*args, **kwargs):
         user_id=user_id,
         project_id=project_id,
         node_uuid=node_uuid,
+        io_log_redirect_cb=None,
         save_to_db_cb=mock_save_db_cb,
         node_port_creator_cb=mock_node_port_creator_cb,
         auto_update=auto_update,
@@ -101,6 +105,7 @@ async def mock_node_port_creator_cb(*args, **kwargs):
         user_id=user_id,
         project_id=project_id,
         node_uuid=node_uuid,
+        io_log_redirect_cb=None,
         save_to_db_cb=mock_save_db_cb,
         node_port_creator_cb=mock_node_port_creator_cb,
         auto_update=False,
@@ -113,6 +118,7 @@ async def mock_node_port_creator_cb(*args, **kwargs):
         user_id=user_id,
         project_id=project_id,
         node_uuid=node_uuid,
+        io_log_redirect_cb=None,
         save_to_db_cb=mock_save_db_cb,
         node_port_creator_cb=mock_node_port_creator_cb,
         auto_update=False,
@@ -182,6 +188,7 @@ async def mock_node_port_creator_cb(*args, **kwargs):
         user_id=user_id,
         project_id=project_id,
         node_uuid=node_uuid,
+        io_log_redirect_cb=None,
         save_to_db_cb=mock_save_db_cb,
         node_port_creator_cb=mock_node_port_creator_cb,
         auto_update=False,
@@ -194,6 +201,7 @@ async def mock_node_port_creator_cb(*args, **kwargs):
         user_id=user_id,
         project_id=project_id,
         node_uuid=node_uuid,
+        io_log_redirect_cb=None,
         save_to_db_cb=mock_save_db_cb,
         node_port_creator_cb=mock_node_port_creator_cb,
         auto_update=False,
@@ -215,3 +223,54 @@ async def test_node_ports_v2_packages(
     db_manager = mock_db_manager(default_configuration)
     node_ports = await ports(user_id, project_id, node_uuid)
     node_ports = await ports(user_id, project_id, node_uuid, db_manager=db_manager)
+
+
+@pytest.fixture
+def mock_port_set(mocker: MockFixture) -> None:
+    async def _always_raise_error(*args, **kwargs):
+        async def _i_raise_errors():
+            raise ValidationError.from_exception_data("invalid", [{"type": "missing", "loc": ("value",), "input": None}])
+        return asyncio.create_task(_i_raise_errors())
+
+    mocker.patch("simcore_sdk.node_ports_v2.port.Port._set", side_effect=_always_raise_error)
+
+async def test_node_ports_v2_set_multiple_catch_multiple_failing_set_ports(
+    mock_port_set: None,
+    mock_db_manager: Callable,
+    default_configuration: dict[str, Any],
+    user_id: int,
+    project_id: str,
+    node_uuid: str,
+    faker: Faker,
+):
+    db_manager = mock_db_manager(default_configuration)
+
+    original_inputs = create_valid_port_mapping(InputsList, suffix="original")
+    original_outputs = create_valid_port_mapping(OutputsList, suffix="original")
+
+
+    async def _mock_callback(*args, **kwargs):
+        pass
+
+    node_ports = Nodeports(
+        inputs=original_inputs,
+        outputs=original_outputs,
+        db_manager=db_manager,
+        user_id=user_id,
+        project_id=project_id,
+        node_uuid=node_uuid,
+        io_log_redirect_cb=None,
+        save_to_db_cb=_mock_callback,
+        node_port_creator_cb=_mock_callback,
+        auto_update=False,
+    )
+    async with ProgressBarData(num_steps=1, description=faker.pystr()) as progress_bar:
+        with pytest.raises(ValidationError):
+            await node_ports.set_multiple(
+                {
+                    port.key: (port.value, None)
+                    for port in list(original_inputs.values())
+                    + list(original_outputs.values())
+                },
+                progress_bar=progress_bar,
+            )
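One non-mechanical detail in the new test above: Pydantic v2's ValidationError comes from pydantic-core and can no longer be raised as ValidationError("..."), so a test double has to synthesize it via from_exception_data. A minimal sketch (the title string and the single "missing" line error are arbitrary test payloads):

    from pydantic import ValidationError

    err = ValidationError.from_exception_data(
        "invalid", [{"type": "missing", "loc": ("value",), "input": None}]
    )
    assert err.error_count() == 1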
diff --git a/packages/simcore-sdk/tests/unit/test_node_ports_v2_port.py b/packages/simcore-sdk/tests/unit/test_node_ports_v2_port.py
index 8485e19b74b..f8dbe5d0e8d 100644
--- a/packages/simcore-sdk/tests/unit/test_node_ports_v2_port.py
+++ b/packages/simcore-sdk/tests/unit/test_node_ports_v2_port.py
@@ -22,8 +22,7 @@
 from faker import Faker
 from models_library.api_schemas_storage import FileMetaDataGet
 from models_library.projects_nodes_io import LocationID
-from pydantic import parse_obj_as
-from pydantic.error_wrappers import ValidationError
+from pydantic import TypeAdapter, ValidationError
 from pytest_mock.plugin import MockerFixture
 from servicelib.progress_bar import ProgressBarData
 from simcore_sdk.node_ports_common.file_io_utils import LogRedirectCB
@@ -218,8 +217,8 @@ def e_tag_fixture() -> str:
 async def mock_filemanager(mocker: MockerFixture, e_tag: str, faker: Faker) -> None:
     mocker.patch(
         "simcore_sdk.node_ports_common.filemanager._get_file_meta_data",
-        return_value=parse_obj_as(
-            FileMetaDataGet, FileMetaDataGet.Config.schema_extra["examples"][0]
+        return_value=TypeAdapter(FileMetaDataGet).validate_python(
+            FileMetaDataGet.model_config["json_schema_extra"]["examples"][0],
        ),
     )
     mocker.patch(
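The fixture change above relies on v2 moving schema examples from the inner class Config.schema_extra into the model_config dict. A compressed sketch with a stand-in model (the field names are illustrative, not the FileMetaDataGet schema):

    from pydantic import BaseModel, ConfigDict

    class FileMeta(BaseModel):  # stand-in; not the real FileMetaDataGet fields
        file_id: str

        model_config = ConfigDict(json_schema_extra={"examples": [{"file_id": "f1"}]})

    # v1: FileMeta.parse_obj(FileMeta.Config.schema_extra["examples"][0])
    example = FileMeta.model_config["json_schema_extra"]["examples"][0]
    meta = FileMeta.model_validate(example)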
diff --git a/packages/simcore-sdk/tests/unit/test_node_ports_v2_port_mapping.py b/packages/simcore-sdk/tests/unit/test_node_ports_v2_port_mapping.py
index 10c074591fc..3746520f42c 100644
--- a/packages/simcore-sdk/tests/unit/test_node_ports_v2_port_mapping.py
+++ b/packages/simcore-sdk/tests/unit/test_node_ports_v2_port_mapping.py
@@ -4,20 +4,21 @@
 from collections import deque
 from pprint import pprint
-from typing import Any, Dict, List, Type, Union
+from typing import Any

 import pytest
 from models_library.services import ServiceInput
-from pydantic import ValidationError, confloat, schema_of
+from pydantic import Field, ValidationError, schema_of
 from simcore_sdk.node_ports_v2 import exceptions
 from simcore_sdk.node_ports_v2.port import Port
 from simcore_sdk.node_ports_v2.ports_mapping import InputsList, OutputsList
+from typing_extensions import Annotated
 from utils_port_v2 import create_valid_port_config

 @pytest.mark.parametrize("port_class", [InputsList, OutputsList])
-def test_empty_ports_mapping(port_class: Type[Union[InputsList, OutputsList]]):
-    port_mapping = port_class(__root__={})
+def test_empty_ports_mapping(port_class: type[InputsList | OutputsList]):
+    port_mapping = port_class(root={})
     assert not port_mapping.items()
     assert not port_mapping.values()
     assert not port_mapping.keys()
@@ -28,17 +29,17 @@ def test_empty_ports_mapping(port_class: type[InputsList | OutputsList]):

 @pytest.mark.parametrize("port_class", [InputsList, OutputsList])
-def test_filled_ports_mapping(port_class: Type[Union[InputsList, OutputsList]]):
-    port_cfgs: Dict[str, Any] = {}
+def test_filled_ports_mapping(port_class: type[InputsList | OutputsList]):
+    port_cfgs: dict[str, Any] = {}
     for t in ["integer", "number", "boolean", "string"]:
         port = create_valid_port_config(t)
         port_cfgs[port["key"]] = port
     port_cfgs["some_file"] = create_valid_port_config("data:*/*", key="some_file")

-    port_mapping = port_class(__root__=port_cfgs)
+    port_mapping = port_class(root=port_cfgs)

     # two ways to construct instances of __root__
-    assert port_class.parse_obj(port_cfgs) == port_mapping
+    assert port_class.model_validate(port_cfgs) == port_mapping

     assert len(port_mapping) == len(port_cfgs)
     for port_key, port_value in port_mapping.items():
@@ -60,8 +61,8 @@ def test_filled_ports_mapping(port_class: type[InputsList | OutputsList]):

 def test_io_ports_are_not_aliases():
     # prevents creating alises as InputsList = PortsMappings
-    inputs = InputsList(__root__={})
-    outputs = OutputsList(__root__={})
+    inputs = InputsList(root={})
+    outputs = OutputsList(root={})

     assert isinstance(inputs, InputsList)
     assert not isinstance(inputs, OutputsList)
@@ -71,10 +72,10 @@ def test_io_ports_are_not_aliases():

 @pytest.fixture
-def fake_port_meta() -> Dict[str, Any]:
+def fake_port_meta() -> dict[str, Any]:
     """Service port metadata: defines a list of non-negative numbers"""
     schema = schema_of(
-        List[confloat(ge=0)],
+        list[Annotated[float, Field(ge=0)]],
         title="list[non-negative number]",
     )
     schema.update(
@@ -83,10 +84,10 @@ def fake_port_meta() -> Dict[str, Any]:
     )

     port_model = ServiceInput.from_json_schema(port_schema=schema)
-    return port_model.dict(exclude_unset=True, by_alias=True)
+    return port_model.model_dump(exclude_unset=True, by_alias=True)

-def test_validate_port_value_against_schema(fake_port_meta: Dict[str, Any]):
+def test_validate_port_value_against_schema(fake_port_meta: dict[str, Any]):
     # A simcore-sdk Port instance is a combination of both
     # - the port's metadata
     # - the port's value
@@ -109,19 +110,19 @@ def test_validate_port_value_against_schema(fake_port_meta: dict[str, Any]):
     assert error["loc"] == ("value",)
     assert "-2 is less than the minimum of 0" in error["msg"]
-    assert error["type"] == "value_error.port_validation.schema_error"
+    assert error["type"] == "value_error"

     assert "ctx" in error
-    assert error["ctx"]["port_key"] == "port_1"
+    assert error["ctx"]["error"].port_key == "port_1"

-    schema_error_message = error["ctx"]["schema_error_message"]
-    schema_error_path = error["ctx"]["schema_error_path"]
+    schema_error_message = error["ctx"]["error"].schema_error_message
+    schema_error_path = error["ctx"]["error"].schema_error_path

     assert schema_error_message in error["msg"]
     assert schema_error_path == deque([1])

-def test_validate_iolist_against_schema(fake_port_meta: Dict[str, Any]):
+def test_validate_iolist_against_schema(fake_port_meta: dict[str, Any]):
     # Check how errors propagate from a single Port to InputsList

     # reference port
@@ -151,7 +152,7 @@ def test_validate_iolist_against_schema(fake_port_meta: dict[str, Any]):

     # ----
     with pytest.raises(ValidationError) as err_info:
-        InputsList.parse_obj({p["key"]: p for p in ports})
+        InputsList.model_validate({p["key"]: p for p in ports})

     # ---
     assert isinstance(err_info.value, ValidationError)
@@ -161,14 +162,13 @@ def test_validate_iolist_against_schema(fake_port_meta: dict[str, Any]):
     for error in err_info.value.errors():
         error_loc = error["loc"]
         assert "ctx" in error
-        port_key = error["ctx"].get("port_key")
+        port_key = error["ctx"]["error"].port_key

         # path hierachy
-        assert error_loc[0] == "__root__", f"{error_loc=}"
-        assert error_loc[1] == port_key, f"{error_loc=}"
-        assert error_loc[-1] == "value", f"{error_loc=}"
+        assert error_loc[0] == port_key, f"{error_loc=}"
+        assert error_loc[1] == "value", f"{error_loc=}"

-        assert error["type"] == "value_error.port_validation.schema_error"
+        assert error["type"] == "value_error"

         port_with_errors.append(port_key)
         pprint(error)
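The ports-mapping diff above and the port-validation diff below lean on three related v2 idioms: root models take root= instead of __root__= (and drop the "__root__" segment from error locations), conint/confloat constraints become Annotated[..., Field(...)], and the original exception raised during validation is reachable at error["ctx"]["error"]. A self-contained sketch under those assumptions (NumbersList is a stand-in, not the simcore_sdk classes):

    from typing import Annotated

    from pydantic import Field, RootModel, ValidationError, field_validator

    NonNegative = Annotated[float, Field(ge=0)]  # v1: confloat(ge=0)

    class NumbersList(RootModel[list[NonNegative]]):  # v1: __root__: list[...]
        @field_validator("root")
        @classmethod
        def _check(cls, v: list[float]) -> list[float]:
            if len(v) > 100:
                raise ValueError("too many items")
            return v

    NumbersList(root=[1.0, 2.0])  # v1: NumbersList(__root__=[1.0, 2.0])
    try:
        NumbersList.model_validate([-1.0])  # v1: NumbersList.parse_obj([-1.0])
    except ValidationError as e:
        error = e.errors()[0]
        assert error["type"] == "greater_than_equal"
        assert error["loc"] == (0,)  # no leading "__root__" segment in v2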
diff --git a/packages/simcore-sdk/tests/unit/test_node_ports_v2_port_validation.py b/packages/simcore-sdk/tests/unit/test_node_ports_v2_port_validation.py
index 41e61669fe5..ee0d19cec90 100644
--- a/packages/simcore-sdk/tests/unit/test_node_ports_v2_port_validation.py
+++ b/packages/simcore-sdk/tests/unit/test_node_ports_v2_port_validation.py
@@ -13,13 +14,13 @@
 from unittest.mock import AsyncMock

 import pytest
-from pydantic import BaseModel, conint, schema_of
-from pydantic.error_wrappers import ValidationError
+from pydantic import BaseModel, Field, ValidationError, schema_of
 from simcore_sdk.node_ports_v2.port import Port
 from simcore_sdk.node_ports_v2.port_validation import (
     PortUnitError,
     validate_port_content,
 )
+from typing_extensions import Annotated

 def _replace_value_in_dict(item: Any, original_schema: dict[str, Any]):
@@ -128,7 +129,7 @@ async def test_port_with_array_of_object(mocker):
     mocker.patch.object(Port, "_node_ports", new=AsyncMock())

     class A(BaseModel):
-        i: conint(gt=3)
+        i: Annotated[int, Field(gt=3)]
         b: bool = False
         s: str
         l: list[int]
@@ -142,7 +143,7 @@ class A(BaseModel):
         "contentSchema": content_schema,
     }
     sample = [{"i": 5, "s": "x", "l": [1, 2]}, {"i": 6, "s": "y", "l": [2]}]
-    expected_value = [A(**i).dict() for i in sample]
+    expected_value = [A(**i).model_dump() for i in sample]

     print(json.dumps(port_meta, indent=1))
     print(json.dumps(expected_value, indent=1))
@@ -244,7 +245,7 @@ async def test_port_with_units_and_constraints(mocker):
         print(validation_error)

         assert validation_error["loc"] == ("value",)  # starts with value,!
-        assert validation_error["type"] == "value_error.port_validation.schema_error"
+        assert validation_error["type"] == "value_error"
         assert "-3.14 is less than the minimum of 0" in validation_error["msg"]

     # inits with None + set_value
@@ -256,8 +257,6 @@ async def test_port_with_units_and_constraints(mocker):
         with pytest.raises(ValidationError) as exc_info:
             await port.set_value(-3.14)

-        assert exc_info.value.errors()[0] == validation_error
-

 def test_incident__port_validator_check_value():
     # SEE incident https://git.speag.com/oSparc/e2e-testing/-/issues/1)
diff --git a/packages/simcore-sdk/tests/unit/test_storage_client.py b/packages/simcore-sdk/tests/unit/test_storage_client.py
index 91e46c5bd61..7786aafe494 100644
--- a/packages/simcore-sdk/tests/unit/test_storage_client.py
+++ b/packages/simcore-sdk/tests/unit/test_storage_client.py
@@ -20,7 +20,7 @@
 )
 from models_library.projects_nodes_io import SimcoreS3FileID
 from models_library.users import UserID
-from pydantic import AnyUrl, ByteSize, parse_obj_as
+from pydantic import AnyUrl, ByteSize, TypeAdapter
 from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict
 from servicelib.aiohttp import status
 from simcore_sdk.node_ports_common import exceptions
@@ -176,8 +176,8 @@ async def test_get_file_metada(
         session=session, file_id=file_id, location_id=location_id, user_id=user_id
     )
     assert file_metadata
-    assert file_metadata == FileMetaDataGet.parse_obj(
-        FileMetaDataGet.Config.schema_extra["examples"][0]
+    assert file_metadata == FileMetaDataGet.model_validate(
+        FileMetaDataGet.model_config["json_schema_extra"]["examples"][0]
     )

@@ -362,12 +362,28 @@ def test_mode_ports_storage_without_auth(
     [
         (True, _HTTP_URL, _HTTPS_URL),
         (False, _HTTP_URL, _HTTP_URL),
-        (True, parse_obj_as(AnyUrl, _HTTP_URL), _HTTPS_URL),
-        (False, parse_obj_as(AnyUrl, _HTTP_URL), _HTTP_URL),
+        (
+            True,
+            str(TypeAdapter(AnyUrl).validate_python(_HTTP_URL)).rstrip("/"),
+            _HTTPS_URL,
+        ),
+        (
+            False,
+            str(TypeAdapter(AnyUrl).validate_python(_HTTP_URL)).rstrip("/"),
+            _HTTP_URL,
+        ),
         (True, _HTTPS_URL, _HTTPS_URL),
         (False, _HTTPS_URL, _HTTPS_URL),
-        (True, parse_obj_as(AnyUrl, _HTTPS_URL), _HTTPS_URL),
-        (False, parse_obj_as(AnyUrl, _HTTPS_URL), _HTTPS_URL),
+        (
+            True,
+            str(TypeAdapter(AnyUrl).validate_python(_HTTPS_URL)).rstrip("/"),
+            _HTTPS_URL,
+        ),
+        (
+            False,
+            str(TypeAdapter(AnyUrl).validate_python(_HTTPS_URL)).rstrip("/"),
+            _HTTPS_URL,
+        ),
         (True, "http://http", "https://http"),
         (True, "https://http", "https://http"),
     ],
@@ -382,4 +398,4 @@ def test__get_secure_link(
     is_storage_secure.cache_clear()
     setenvs_from_dict(monkeypatch, {"STORAGE_SECURE": "1" if storage_secure else "0"})

-    assert _get_https_link_if_storage_secure(provided) == expected
+    assert _get_https_link_if_storage_secure(str(provided)) == expected
diff --git a/tests/environment-setup/requirements/requirements.txt b/tests/environment-setup/requirements/requirements.txt
index d1641743383..1ae402aba4e 100644
---
a/tests/environment-setup/requirements/requirements.txt +++ b/tests/environment-setup/requirements/requirements.txt @@ -1,5 +1,5 @@ -exceptiongroup==1.2.2 - # via pytest +annotated-types==0.7.0 + # via pydantic iniconfig==2.0.0 # via pytest packaging==24.1 @@ -8,16 +8,20 @@ packaging==24.1 # pytest-sugar pluggy==1.5.0 # via pytest -pydantic==1.10.17 +pydantic==2.9.2 # via + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/settings-library/requirements/_base.in # -c requirements/../../../requirements/constraints.txt # -r requirements/requirements.in +pydantic-core==2.23.4 + # via pydantic pytest==8.3.2 # via # -r requirements/requirements.in @@ -26,9 +30,13 @@ pytest==8.3.2 # pytest-sugar pytest-asyncio==0.23.8 # via + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/requirements.in @@ -40,15 +48,19 @@ pytest-sugar==1.0.0 # via -r requirements/requirements.in pyyaml==6.0.2 # via + # -c 
requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/requirements.in termcolor==2.4.0 # via pytest-sugar -tomli==2.0.1 - # via pytest typing-extensions==4.12.2 - # via pydantic + # via + # pydantic + # pydantic-core diff --git a/tests/public-api/requirements/_base.txt b/tests/public-api/requirements/_base.txt index fe4fab279b4..4735ddc8cac 100644 --- a/tests/public-api/requirements/_base.txt +++ b/tests/public-api/requirements/_base.txt @@ -1,21 +1,23 @@ +annotated-types==0.7.0 + # via pydantic anyio==4.4.0 # via httpx certifi==2024.7.4 # via + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # httpcore # httpx # osparc-client click==8.1.7 # via typer -exceptiongroup==1.2.2 - # via anyio h11==0.14.0 # via httpcore httpcore==1.0.5 # via httpx httpx==0.27.0 # via + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # osparc idna==3.7 @@ -34,14 +36,23 @@ osparc-client==0.6.6 # via osparc packaging==24.1 # via osparc -pydantic==1.10.17 +pydantic==2.9.2 # via + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in + # pydantic-settings +pydantic-core==2.23.4 + # via pydantic +pydantic-settings==2.5.2 + # via -r requirements/../../../packages/settings-library/requirements/_base.in pygments==2.18.0 # via rich python-dateutil==2.9.0.post0 # via osparc-client +python-dotenv==1.0.1 + # via pydantic-settings rich==13.7.1 # via # -r requirements/../../../packages/settings-library/requirements/_base.in @@ -64,8 +75,8 @@ typer==0.12.4 # via -r requirements/../../../packages/settings-library/requirements/_base.in 
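Across all of these lockfiles the same dependency split repeats: v2 ships pydantic-core as the compiled validation engine, and BaseSettings moves into the separate pydantic-settings package, which is what pulls python-dotenv into the base requirements. The code-side counterpart is roughly (a generic sketch, not one of the simcore settings classes):

    # v1: from pydantic import BaseSettings
    from pydantic_settings import BaseSettings, SettingsConfigDict

    class AppSettings(BaseSettings):
        model_config = SettingsConfigDict(env_file=".env")  # .env loading via python-dotenv

        LOG_LEVEL: str = "INFO"

    settings = AppSettings()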
typing-extensions==4.12.2 # via - # anyio # pydantic + # pydantic-core # typer urllib3==2.2.2 # via diff --git a/tests/swarm-deploy/requirements/_test.txt b/tests/swarm-deploy/requirements/_test.txt index 68e7f7e9a96..a85e38aa920 100644 --- a/tests/swarm-deploy/requirements/_test.txt +++ b/tests/swarm-deploy/requirements/_test.txt @@ -23,16 +23,28 @@ aiohappyeyeballs==2.4.0 # via aiohttp aiohttp==3.10.5 # via + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt @@ -49,6 +61,8 @@ alembic==1.13.2 # -r requirements/../../../packages/postgres-database/requirements/_migration.txt # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in # -r requirements/_test.in +annotated-types==0.7.0 + # via pydantic anyio==4.4.0 # via # fast-depends @@ -65,10 +79,8 @@ arrow==1.3.0 # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in async-timeout==4.0.3 # via - # aiohttp # aiopg # asyncpg - # redis asyncpg==0.29.0 # via sqlalchemy attrs==24.2.0 @@ -78,16 +90,28 @@ attrs==24.2.0 # referencing certifi==2024.7.4 # via + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt @@ -110,10 +134,6 @@ docker==7.1.0 # -r requirements/_test.in email-validator==2.2.0 # via pydantic -exceptiongroup==1.2.2 - # via - # anyio - # pytest fast-depends==2.4.8 # via faststream faststream==0.5.18 @@ -152,16 +172,28 @@ jsonschema-specifications==2023.7.1 # via jsonschema mako==1.3.5 # via + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt @@ -181,16 +213,28 @@ multidict==6.0.5 # yarl orjson==3.10.7 # via + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../requirements/constraints.txt @@ -213,34 +257,78 @@ psycopg2-binary==2.9.9 # via # aiopg # sqlalchemy -pydantic==1.10.17 +pydantic==2.9.2 # via + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in + # -r 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in
     #   -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/_base.in
     #   -r requirements/../../../packages/simcore-sdk/requirements/_base.in
     #   fast-depends
+    #   pydantic-extra-types
+    #   pydantic-settings
+pydantic-core==2.23.4
+    # via pydantic
+pydantic-extra-types==2.9.0
+    # via
+    #   -r requirements/../../../packages/models-library/requirements/_base.in
+    #   -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in
+    #   -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/_base.in
+    #   -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in
+pydantic-settings==2.5.2
+    # via
+    #   -r requirements/../../../packages/models-library/requirements/_base.in
+    #   -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in
+    #   -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in
+    #   -r requirements/../../../packages/settings-library/requirements/_base.in
+    #   -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/_base.in
+    #   -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in
+    #   -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in
+    #   -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/_base.in
 pygments==2.18.0
     # via rich
 pyinstrument==4.7.2
@@ -256,16 +344,28 @@ pytest==8.3.2
     #   pytest-sugar
 pytest-asyncio==0.23.8
     # via
+    #   -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../requirements/constraints.txt
@@ -281,19 +381,33 @@ pytest-sugar==1.0.0
 python-dateutil==2.9.0.post0
     # via arrow
 python-dotenv==1.0.1
-    # via -r requirements/_test.in
+    # via
+    #   -r requirements/_test.in
+    #   pydantic-settings
 pyyaml==6.0.2
     # via
+    #   -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../requirements/constraints.txt
@@ -302,16 +416,28 @@ pyyaml==6.0.2
     #   -r requirements/_test.in
 redis==5.0.8
     # via
+    #   -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../requirements/constraints.txt
@@ -346,16 +472,28 @@ sniffio==1.3.1
     # via anyio
 sqlalchemy==1.4.53
     # via
+    #   -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../requirements/constraints.txt
@@ -372,8 +510,6 @@ tenacity==9.0.0
     #   -r requirements/_test.in
 termcolor==2.4.0
     # via pytest-sugar
-tomli==2.0.1
-    # via pytest
 toolz==0.12.1
     # via
     #   -r requirements/../../../packages/service-library/requirements/_base.in
@@ -397,25 +533,37 @@ typing-extensions==4.12.2
     #   -r requirements/../../../packages/postgres-database/requirements/_migration.txt
     #   aiodebug
     #   alembic
-    #   anyio
     #   faststream
     #   flexcache
     #   flexparser
     #   pint
     #   pydantic
+    #   pydantic-core
     #   typer
 urllib3==2.2.2
     # via
+    #   -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt
+    #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt
     #   -c requirements/../../../requirements/constraints.txt
diff --git a/tests/swarm-deploy/requirements/_tools.txt b/tests/swarm-deploy/requirements/_tools.txt
index 24faa87728a..14b66aa5089 100644
--- a/tests/swarm-deploy/requirements/_tools.txt
+++ b/tests/swarm-deploy/requirements/_tools.txt
@@ -69,21 +69,11 @@ ruff==0.6.1
     # via -r requirements/../../../requirements/devenv.txt
 setuptools==73.0.1
     # via pip-tools
-tomli==2.0.1
-    # via
-    #   -c requirements/_test.txt
-    #   black
-    #   build
-    #   mypy
-    #   pip-tools
-    #   pylint
 tomlkit==0.13.2
     # via pylint
 typing-extensions==4.12.2
     # via
     #   -c requirements/_test.txt
-    #   astroid
-    #   black
     #   mypy
 virtualenv==20.26.3
     # via pre-commit