From dcb5895e8d9d5bdbab4551f49993749da7ca02b7 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 16 Sep 2024 13:15:55 +0200 Subject: [PATCH 001/280] upgrade aws-library --- packages/aws-library/requirements/_base.txt | 7 +- packages/aws-library/requirements/_test.txt | 11 ++- .../src/aws_library/ec2/_models.py | 86 ++++++++++--------- 3 files changed, 63 insertions(+), 41 deletions(-) diff --git a/packages/aws-library/requirements/_base.txt b/packages/aws-library/requirements/_base.txt index 50f6f157311..69125f953cf 100644 --- a/packages/aws-library/requirements/_base.txt +++ b/packages/aws-library/requirements/_base.txt @@ -32,6 +32,8 @@ aiormq==6.8.0 # via aio-pika aiosignal==1.3.1 # via aiohttp +annotated-types==0.7.0 + # via pydantic anyio==4.4.0 # via # fast-depends @@ -105,7 +107,7 @@ orjson==3.10.7 # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in pamqp==3.3.0 # via aiormq -pydantic==1.10.17 +pydantic==2.9.1 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -120,6 +122,8 @@ pydantic==1.10.17 # -r requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/_base.in # fast-depends +pydantic-core==2.23.3 + # via pydantic pygments==2.18.0 # via rich pyinstrument==4.7.2 @@ -198,6 +202,7 @@ typing-extensions==4.12.2 # aiodebug # faststream # pydantic + # pydantic-core # typer # types-aiobotocore # types-aiobotocore-ec2 diff --git a/packages/aws-library/requirements/_test.txt b/packages/aws-library/requirements/_test.txt index 213e7ccb561..ace30f36117 100644 --- a/packages/aws-library/requirements/_test.txt +++ b/packages/aws-library/requirements/_test.txt @@ -1,3 +1,7 @@ +annotated-types==0.7.0 + # via + # -c requirements/_base.txt + # pydantic antlr4-python3-runtime==4.13.2 # via moto appdirs==1.4.4 @@ -151,11 +155,15 @@ py-partiql-parser==0.5.5 # via moto pycparser==2.22 # via cffi -pydantic==1.10.17 +pydantic==2.9.1 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # aws-sam-translator +pydantic-core==2.23.3 + # via + # -c requirements/_base.txt + # pydantic pyparsing==3.1.2 # via moto pytest==8.3.2 @@ -268,6 +276,7 @@ typing-extensions==4.12.2 # flexparser # pint # pydantic + # pydantic-core # types-aioboto3 # types-aiobotocore urllib3==2.2.2 diff --git a/packages/aws-library/src/aws_library/ec2/_models.py b/packages/aws-library/src/aws_library/ec2/_models.py index fed1f3ea46a..6df4e19b70c 100644 --- a/packages/aws-library/src/aws_library/ec2/_models.py +++ b/packages/aws-library/src/aws_library/ec2/_models.py @@ -1,19 +1,19 @@ import datetime -import re import tempfile from dataclasses import dataclass -from typing import Any, ClassVar, TypeAlias +from typing import Annotated, TypeAlias import sh # type: ignore[import-untyped] from models_library.docker import DockerGenericTag from pydantic import ( BaseModel, ByteSize, - ConstrainedStr, + ConfigDict, Field, NonNegativeFloat, NonNegativeInt, - validator, + StringConstraints, + field_validator, ) from types_aiobotocore_ec2.literals import InstanceStateNameType, InstanceTypeType @@ -33,26 +33,26 @@ def __gt__(self, other: "Resources") -> bool: return self.cpus > other.cpus or self.ram > other.ram def __add__(self, other: "Resources") -> "Resources": - return Resources.construct( + return 
Resources.model_construct( **{ key: a + b for (key, a), b in zip( - self.dict().items(), other.dict().values(), strict=True + self.model_dump().items(), other.model_dump().values(), strict=True ) } ) def __sub__(self, other: "Resources") -> "Resources": - return Resources.construct( + return Resources.model_construct( **{ key: a - b for (key, a), b in zip( - self.dict().items(), other.dict().values(), strict=True + self.model_dump().items(), other.model_dump().values(), strict=True ) } ) - @validator("cpus", pre=True) + @field_validator("cpus", mode="before") @classmethod def _floor_cpus_to_0(cls, v: float) -> float: return max(v, 0) @@ -67,19 +67,26 @@ class EC2InstanceType: InstancePrivateDNSName: TypeAlias = str -class AWSTagKey(ConstrainedStr): - # see [https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/Using_Tags.html#tag-restrictions] - regex = re.compile(r"^(?!(_index|\.{1,2})$)[a-zA-Z0-9\+\-=\._:@]+$") - min_length = 1 - max_length = 128 - - -class AWSTagValue(ConstrainedStr): - # see [https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/Using_Tags.html#tag-restrictions] - # quotes []{} were added as it allows to json encode. it seems to be accepted as a value - regex = re.compile(r"^[a-zA-Z0-9\s\+\-=\.,_:/@\"\'\[\]\{\}]*$") - min_length = 0 - max_length = 256 +# see [https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/Using_Tags.html#tag-restrictions] +AWSTagKey = Annotated[ + str, + StringConstraints( + min_length=1, + max_length=128, + pattern=r"^(?!(_index|\.{1,2})$)[a-zA-Z0-9\+\-=\._:@]+$", + ), +] + +# see [https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/Using_Tags.html#tag-restrictions] +# quotes []{} were added as it allows to json encode. it seems to be accepted as a value +AWSTagValue = Annotated[ + str, + StringConstraints( + min_length=0, + max_length=256, + pattern=r"^[a-zA-Z0-9\s\+\-=\.,_:/@\"\'\[\]\{\}]*$", + ), +] EC2Tags: TypeAlias = dict[AWSTagKey, AWSTagValue] @@ -148,8 +155,23 @@ class EC2InstanceBootSpecific(BaseModel): default=0, description="number of buffer EC2s to keep (defaults to 0)" ) - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + @field_validator("custom_boot_scripts") + @classmethod + def validate_bash_calls(cls, v): + try: + with tempfile.NamedTemporaryFile(mode="wt", delete=True) as temp_file: + temp_file.writelines(v) + temp_file.flush() + # NOTE: this will not capture runtime errors, but at least some syntax errors such as invalid quotes + sh.bash("-n", temp_file.name) + except sh.ErrorReturnCode as exc: + msg = f"Invalid bash call in custom_boot_scripts: {v}, Error: {exc.stderr}" + raise ValueError(msg) from exc + + return v + + model_config = ConfigDict( + json_schema_extra={ "examples": [ { # just AMI @@ -205,18 +227,4 @@ class Config: }, ] } - - @validator("custom_boot_scripts") - @classmethod - def validate_bash_calls(cls, v): - try: - with tempfile.NamedTemporaryFile(mode="wt", delete=True) as temp_file: - temp_file.writelines(v) - temp_file.flush() - # NOTE: this will not capture runtime errors, but at least some syntax errors such as invalid quotes - sh.bash("-n", temp_file.name) - except sh.ErrorReturnCode as exc: - msg = f"Invalid bash call in custom_boot_scripts: {v}, Error: {exc.stderr}" - raise ValueError(msg) from exc - - return v + ) From 362066808e42135fe6737eb838a244209c3fb1cd Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 16 Sep 2024 13:33:31 +0200 Subject: [PATCH 002/280] upgrade dask-task-models-library --- .../requirements/_base.txt | 7 ++- .../requirements/_test.txt | 6 --- 
.../requirements/_tools.txt | 10 ----- .../container_tasks/docker.py | 11 ++--- .../container_tasks/events.py | 31 ++++++------- .../container_tasks/io.py | 44 +++++++++++-------- .../container_tasks/protocol.py | 28 ++++++------ 7 files changed, 67 insertions(+), 70 deletions(-) diff --git a/packages/dask-task-models-library/requirements/_base.txt b/packages/dask-task-models-library/requirements/_base.txt index 327a04c5678..aea8fcde117 100644 --- a/packages/dask-task-models-library/requirements/_base.txt +++ b/packages/dask-task-models-library/requirements/_base.txt @@ -1,3 +1,5 @@ +annotated-types==0.7.0 + # via pydantic arrow==1.3.0 # via -r requirements/../../../packages/models-library/requirements/_base.in attrs==24.2.0 @@ -65,7 +67,7 @@ partd==1.4.2 # via dask psutil==6.0.0 # via distributed -pydantic==1.10.17 +pydantic==2.9.1 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt @@ -73,6 +75,8 @@ pydantic==1.10.17 # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/_base.in +pydantic-core==2.23.3 + # via pydantic pygments==2.18.0 # via rich python-dateutil==2.9.0.post0 @@ -118,6 +122,7 @@ types-python-dateutil==2.9.0.20240821 typing-extensions==4.12.2 # via # pydantic + # pydantic-core # typer urllib3==2.2.2 # via diff --git a/packages/dask-task-models-library/requirements/_test.txt b/packages/dask-task-models-library/requirements/_test.txt index aa7e91d4d23..521d13265d9 100644 --- a/packages/dask-task-models-library/requirements/_test.txt +++ b/packages/dask-task-models-library/requirements/_test.txt @@ -4,8 +4,6 @@ coverage==7.6.1 # via # -r requirements/_test.in # pytest-cov -exceptiongroup==1.2.2 - # via pytest faker==27.0.0 # via -r requirements/_test.in flexcache==0.3 @@ -67,10 +65,6 @@ six==1.16.0 # python-dateutil termcolor==2.4.0 # via pytest-sugar -tomli==2.0.1 - # via - # coverage - # pytest typing-extensions==4.12.2 # via # -c requirements/_base.txt diff --git a/packages/dask-task-models-library/requirements/_tools.txt b/packages/dask-task-models-library/requirements/_tools.txt index b76f8083592..b9ee0a3c96d 100644 --- a/packages/dask-task-models-library/requirements/_tools.txt +++ b/packages/dask-task-models-library/requirements/_tools.txt @@ -70,22 +70,12 @@ ruff==0.6.1 # via -r requirements/../../../requirements/devenv.txt setuptools==73.0.1 # via pip-tools -tomli==2.0.1 - # via - # -c requirements/_test.txt - # black - # build - # mypy - # pip-tools - # pylint tomlkit==0.13.2 # via pylint typing-extensions==4.12.2 # via # -c requirements/_base.txt # -c requirements/_test.txt - # astroid - # black # mypy virtualenv==20.26.3 # via pre-commit diff --git a/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/docker.py b/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/docker.py index 4e9d36df3fb..b4fa976b665 100644 --- a/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/docker.py +++ b/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/docker.py @@ -1,4 +1,4 @@ -from pydantic import BaseModel, Extra, SecretStr +from pydantic import BaseModel, ConfigDict, SecretStr class DockerBasicAuth(BaseModel): @@ -6,9 +6,9 @@ class DockerBasicAuth(BaseModel): username: str password: SecretStr - class 
Config: - extra = Extra.forbid - schema_extra = { + model_config = ConfigDict( + extra="forbid", + json_schema_extra={ "examples": [ { "server_address": "docker.io", @@ -16,4 +16,5 @@ class Config: "password": "123456", } ] - } + }, + ) diff --git a/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/events.py b/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/events.py index 1455c00cbff..50fdb3144a3 100644 --- a/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/events.py +++ b/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/events.py @@ -1,10 +1,10 @@ import logging from abc import ABC, abstractmethod -from typing import Any, ClassVar, TypeAlias +from typing import TypeAlias import dask.typing from distributed.worker import get_worker -from pydantic import BaseModel, Extra, validator +from pydantic import BaseModel, ConfigDict, field_validator from .protocol import TaskOwner @@ -19,8 +19,7 @@ class BaseTaskEvent(BaseModel, ABC): def topic_name() -> str: raise NotImplementedError - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") def _dask_key_to_dask_task_id(key: dask.typing.Key) -> str: @@ -51,8 +50,15 @@ def from_dask_worker( task_owner=task_owner, ) - class Config(BaseTaskEvent.Config): - schema_extra: ClassVar[dict[str, Any]] = { + @field_validator("progress") + @classmethod + def ensure_between_0_1(cls, v): + if 0 <= v <= 1: + return v + return min(max(0, v), 1) + + model_config = ConfigDict( + json_schema_extra={ "examples": [ { "job_id": "simcore/services/comp/sleeper:1.1.0:projectid_ec7e595a-63ee-46a1-a04a-901b11b649f8:nodeid_39467d89-b659-4914-9359-c40b1b6d1d6d:uuid_5ee5c655-450d-4711-a3ec-32ffe16bc580", @@ -78,13 +84,7 @@ class Config(BaseTaskEvent.Config): }, ] } - - @validator("progress", always=True) - @classmethod - def ensure_between_0_1(cls, v): - if 0 <= v <= 1: - return v - return min(max(0, v), 1) + ) LogMessageStr: TypeAlias = str @@ -112,8 +112,8 @@ def from_dask_worker( task_owner=task_owner, ) - class Config(BaseTaskEvent.Config): - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ { "job_id": "simcore/services/comp/sleeper:1.1.0:projectid_ec7e595a-63ee-46a1-a04a-901b11b649f8:nodeid_39467d89-b659-4914-9359-c40b1b6d1d6d:uuid_5ee5c655-450d-4711-a3ec-32ffe16bc580", @@ -129,3 +129,4 @@ class Config(BaseTaskEvent.Config): }, ] } + ) diff --git a/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/io.py b/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/io.py index 887397d4227..5f46f75d93b 100644 --- a/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/io.py +++ b/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/io.py @@ -1,7 +1,7 @@ import json from contextlib import suppress from pathlib import Path -from typing import Any, ClassVar, TypeAlias, Union +from typing import Any, TypeAlias, Union from models_library.basic_regex import MIME_TYPE_RE from models_library.generics import DictModel @@ -9,7 +9,7 @@ from pydantic import ( AnyUrl, BaseModel, - Extra, + ConfigDict, Field, StrictBool, StrictFloat, @@ -23,9 +23,9 @@ class PortSchema(BaseModel): required: bool - class Config: - extra = Extra.forbid - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + extra="forbid", + json_schema_extra={ "examples": [ { "required": True, @@ -34,15 
+34,16 @@ class Config: "required": False, }, ] - } + }, + ) class FilePortSchema(PortSchema): mapping: str | None = None url: AnyUrl - class Config(PortSchema.Config): - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ { "mapping": "some_filename.txt", @@ -55,6 +56,7 @@ class Config(PortSchema.Config): }, ] } + ) class FileUrl(BaseModel): @@ -64,12 +66,12 @@ class FileUrl(BaseModel): description="Local file relpath name (if given), otherwise it takes the url filename", ) file_mime_type: str | None = Field( - default=None, description="the file MIME type", regex=MIME_TYPE_RE + default=None, description="the file MIME type", pattern=MIME_TYPE_RE ) - class Config: - extra = Extra.forbid - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + extra="forbid", + json_schema_extra={ "examples": [ {"url": "https://some_file_url", "file_mime_type": "application/json"}, { @@ -78,7 +80,8 @@ class Config: "file_mime_type": "application/json", }, ] - } + }, + ) PortValue: TypeAlias = Union[ @@ -94,8 +97,8 @@ class Config: class TaskInputData(DictModel[ServicePortKey, PortValue]): - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ { "boolean_input": False, @@ -106,6 +109,7 @@ class Config: }, ] } + ) PortSchemaValue: TypeAlias = Union[PortSchema, FilePortSchema] @@ -118,8 +122,8 @@ class TaskOutputDataSchema(DictModel[ServicePortKey, PortSchemaValue]): # does not work well in that case. For that reason, the schema is # sent as a json-schema instead of with a dynamically-created model class # - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ { "boolean_output": {"required": False}, @@ -138,6 +142,7 @@ class Config: }, ] } + ) class TaskOutputData(DictModel[ServicePortKey, PortValue]): @@ -172,8 +177,8 @@ def from_task_output( return cls.parse_obj(data) - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ { "boolean_output": False, @@ -184,3 +189,4 @@ class Config: }, ] } + ) diff --git a/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/protocol.py b/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/protocol.py index 00f89d96d94..d64cc5f5fd3 100644 --- a/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/protocol.py +++ b/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/protocol.py @@ -1,4 +1,4 @@ -from typing import Any, ClassVar, Protocol, TypeAlias +from typing import Any, Protocol, TypeAlias from models_library.basic_types import EnvVarKey from models_library.docker import DockerLabelKey @@ -6,7 +6,7 @@ from models_library.projects_nodes_io import NodeID from models_library.services_resources import BootMode from models_library.users import UserID -from pydantic import AnyUrl, BaseModel, root_validator +from pydantic import AnyUrl, BaseModel, ConfigDict, model_validator from settings_library.s3 import S3Settings from .docker import DockerBasicAuth @@ -25,14 +25,14 @@ class TaskOwner(BaseModel): project_id: ProjectID node_id: NodeID - parent_project_id: ProjectID | None - parent_node_id: NodeID | None + parent_project_id: ProjectID | None = None + parent_node_id: NodeID | None = None @property def has_parent(self) -> bool: return bool(self.parent_node_id and self.parent_project_id) - 
@root_validator + @model_validator(mode="after") @classmethod def check_parent_valid(cls, values: dict[str, Any]) -> dict[str, Any]: parent_project_id = values.get("parent_project_id") @@ -44,8 +44,8 @@ def check_parent_valid(cls, values: dict[str, Any]) -> dict[str, Any]: raise ValueError(msg) return values - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ { "user_id": 32, @@ -63,6 +63,7 @@ class Config: }, ] } + ) class ContainerTaskParameters(BaseModel): @@ -76,24 +77,23 @@ class ContainerTaskParameters(BaseModel): boot_mode: BootMode task_owner: TaskOwner - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ { "image": "ubuntu", "tag": "latest", - "input_data": TaskInputData.Config.schema_extra["examples"][0], - "output_data_keys": TaskOutputDataSchema.Config.schema_extra[ - "examples" - ][0], + "input_data": TaskInputData.model_config["json_schema_extra"]["examples"][0], # type: ignore[index] + "output_data_keys": TaskOutputDataSchema.model_config["json_schema_extra"]["examples"][0], # type: ignore[index] "command": ["sleep 10", "echo hello"], "envs": {"MYENV": "is an env"}, "labels": {"io.simcore.thelabel": "is amazing"}, "boot_mode": BootMode.CPU.value, - "task_owner": TaskOwner.Config.schema_extra["examples"][0], + "task_owner": TaskOwner.model_config["json_schema_extra"]["examples"][0], # type: ignore[index] }, ] } + ) class ContainerRemoteFct(Protocol): From 85dffd8c0a8271a1884b1dd90e6e208c224bca06 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 16 Sep 2024 13:36:09 +0200 Subject: [PATCH 003/280] upgrade notifications-library --- packages/notifications-library/requirements/_base.txt | 7 ++++++- packages/notifications-library/requirements/_test.txt | 7 ------- packages/notifications-library/requirements/_tools.txt | 10 ---------- 3 files changed, 6 insertions(+), 18 deletions(-) diff --git a/packages/notifications-library/requirements/_base.txt b/packages/notifications-library/requirements/_base.txt index b7e4320e9e0..f781af68ebf 100644 --- a/packages/notifications-library/requirements/_base.txt +++ b/packages/notifications-library/requirements/_base.txt @@ -4,6 +4,8 @@ aiosmtplib==3.0.2 # via -r requirements/_base.in alembic==1.13.2 # via -r requirements/../../../packages/postgres-database/requirements/_base.in +annotated-types==0.7.0 + # via pydantic arrow==1.3.0 # via -r requirements/../../../packages/models-library/requirements/_base.in async-timeout==4.0.3 @@ -63,7 +65,7 @@ orjson==3.10.7 # -r requirements/../../../packages/models-library/requirements/_base.in psycopg2-binary==2.9.9 # via sqlalchemy -pydantic==1.10.17 +pydantic==2.9.1 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt @@ -72,6 +74,8 @@ pydantic==1.10.17 # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/postgres-database/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in +pydantic-core==2.23.3 + # via pydantic pygments==2.18.0 # via rich python-dateutil==2.9.0.post0 @@ -108,6 +112,7 @@ typing-extensions==4.12.2 # via # alembic # pydantic + # pydantic-core # typer yarl==1.9.4 # via -r requirements/../../../packages/postgres-database/requirements/_base.in diff --git 
a/packages/notifications-library/requirements/_test.txt b/packages/notifications-library/requirements/_test.txt index 48dd95db3fc..25211dd50f2 100644 --- a/packages/notifications-library/requirements/_test.txt +++ b/packages/notifications-library/requirements/_test.txt @@ -10,8 +10,6 @@ coverage==7.6.1 # pytest-cov docker==7.1.0 # via -r requirements/_test.in -exceptiongroup==1.2.2 - # via pytest faker==27.0.0 # via -r requirements/_test.in greenlet==3.0.3 @@ -90,11 +88,6 @@ tenacity==9.0.0 # via -r requirements/_test.in termcolor==2.4.0 # via pytest-sugar -tomli==2.0.1 - # via - # coverage - # mypy - # pytest types-aiofiles==24.1.0.20240626 # via -r requirements/_test.in typing-extensions==4.12.2 diff --git a/packages/notifications-library/requirements/_tools.txt b/packages/notifications-library/requirements/_tools.txt index 71884cfdaf4..8204f34a33c 100644 --- a/packages/notifications-library/requirements/_tools.txt +++ b/packages/notifications-library/requirements/_tools.txt @@ -71,22 +71,12 @@ ruff==0.6.1 # via -r requirements/../../../requirements/devenv.txt setuptools==73.0.1 # via pip-tools -tomli==2.0.1 - # via - # -c requirements/_test.txt - # black - # build - # mypy - # pip-tools - # pylint tomlkit==0.13.2 # via pylint typing-extensions==4.12.2 # via # -c requirements/_base.txt # -c requirements/_test.txt - # astroid - # black # mypy virtualenv==20.26.3 # via pre-commit From 17b945751b9bbf9bd2dfe6b99f6c009795aca530 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 16 Sep 2024 13:50:36 +0200 Subject: [PATCH 004/280] upgrade postgres-database --- .../postgres-database/requirements/_base.txt | 7 +++- .../postgres-database/requirements/_test.txt | 7 ---- .../postgres-database/requirements/_tools.txt | 10 ----- .../utils_folders.py | 41 ++++++++++--------- .../utils_projects_metadata.py | 21 ++++------ .../utils_projects_nodes.py | 10 ++--- 6 files changed, 40 insertions(+), 56 deletions(-) diff --git a/packages/postgres-database/requirements/_base.txt b/packages/postgres-database/requirements/_base.txt index aaf19732f53..5cb99144fd9 100644 --- a/packages/postgres-database/requirements/_base.txt +++ b/packages/postgres-database/requirements/_base.txt @@ -1,5 +1,7 @@ alembic==1.13.2 # via -r requirements/_base.in +annotated-types==0.7.0 + # via pydantic async-timeout==4.0.3 # via asyncpg asyncpg==0.29.0 @@ -18,10 +20,12 @@ multidict==6.0.5 # via yarl psycopg2-binary==2.9.9 # via sqlalchemy -pydantic==1.10.17 +pydantic==2.9.1 # via # -c requirements/../../../requirements/constraints.txt # -r requirements/_base.in +pydantic-core==2.23.3 + # via pydantic sqlalchemy==1.4.53 # via # -c requirements/../../../requirements/constraints.txt @@ -31,5 +35,6 @@ typing-extensions==4.12.2 # via # alembic # pydantic + # pydantic-core yarl==1.9.4 # via -r requirements/_base.in diff --git a/packages/postgres-database/requirements/_test.txt b/packages/postgres-database/requirements/_test.txt index 245a367c69b..8bd80b78b95 100644 --- a/packages/postgres-database/requirements/_test.txt +++ b/packages/postgres-database/requirements/_test.txt @@ -10,8 +10,6 @@ coverage==7.6.1 # via # -r requirements/_test.in # pytest-cov -exceptiongroup==1.2.2 - # via pytest faker==27.0.0 # via -r requirements/_test.in greenlet==3.0.3 @@ -70,11 +68,6 @@ sqlalchemy==1.4.53 # aiopg sqlalchemy2-stubs==0.0.2a38 # via sqlalchemy -tomli==2.0.1 - # via - # coverage - # mypy - # pytest types-docker==7.1.0.20240821 # via -r requirements/_test.in types-psycopg2==2.9.21.20240819 diff --git 
a/packages/postgres-database/requirements/_tools.txt b/packages/postgres-database/requirements/_tools.txt index 10d7162ab81..9247bc4b1a9 100644 --- a/packages/postgres-database/requirements/_tools.txt +++ b/packages/postgres-database/requirements/_tools.txt @@ -70,22 +70,12 @@ ruff==0.6.1 # via -r requirements/../../../requirements/devenv.txt setuptools==73.0.1 # via pip-tools -tomli==2.0.1 - # via - # -c requirements/_test.txt - # black - # build - # mypy - # pip-tools - # pylint tomlkit==0.13.2 # via pylint typing-extensions==4.12.2 # via # -c requirements/_base.txt # -c requirements/_test.txt - # astroid - # black # mypy virtualenv==20.26.3 # via pre-commit diff --git a/packages/postgres-database/src/simcore_postgres_database/utils_folders.py b/packages/postgres-database/src/simcore_postgres_database/utils_folders.py index e0f59cdcfd2..31276e404b3 100644 --- a/packages/postgres-database/src/simcore_postgres_database/utils_folders.py +++ b/packages/postgres-database/src/simcore_postgres_database/utils_folders.py @@ -5,19 +5,20 @@ from datetime import datetime from enum import Enum from functools import reduce -from typing import Any, ClassVar, Final, TypeAlias, cast +from typing import Annotated, Any, ClassVar, Final, TypeAlias, cast import sqlalchemy as sa from aiopg.sa.connection import SAConnection from aiopg.sa.result import RowProxy from pydantic import ( BaseModel, - ConstrainedStr, + ConfigDict, Field, NonNegativeInt, PositiveInt, + StringConstraints, + TypeAdapter, ValidationError, - parse_obj_as, ) from pydantic.errors import PydanticErrorMixin from simcore_postgres_database.utils_ordering import OrderByDict @@ -294,18 +295,22 @@ def _get_filter_for_enabled_permissions( ### -class FolderName(ConstrainedStr): - regex = re.compile( - r'^(?!.*[<>:"/\\|?*\]])(?!.*\b(?:LPT9|COM1|LPT1|COM2|LPT3|LPT4|CON|COM5|COM3|COM4|AUX|PRN|LPT2|LPT5|COM6|LPT7|NUL|COM8|LPT6|COM9|COM7|LPT8)\b).+$', - re.IGNORECASE, - ) - min_length = 1 - max_length = 255 +FolderName = Annotated[ + str, + StringConstraints( + min_length=1, + max_length=255, + pattern=re.compile( + r'^(?!.*[<>:"/\\|?*\]])(?!.*\b(?:LPT9|COM1|LPT1|COM2|LPT3|LPT4|CON|COM5|COM3|COM4|AUX|PRN|LPT2|LPT5|COM6|LPT7|NUL|COM8|LPT6|COM9|COM7|LPT8)\b).+$', + re.IGNORECASE, + ), + ), +] class FolderEntry(BaseModel): id: _FolderID - parent_folder: _FolderID | None = Field(alias="traversal_parent_id") + parent_folder: _FolderID | None = Field(None, alias="traversal_parent_id") name: str description: str owner: _GroupID = Field(alias="created_by") @@ -313,23 +318,19 @@ class FolderEntry(BaseModel): modified: datetime = Field(alias="access_modified") my_access_rights: _FolderPermissions access_rights: dict[_GroupID, _FolderPermissions] - - class Config: - orm_mode = True + model_config = ConfigDict(from_attributes=True) class _ResolvedAccessRights(BaseModel): folder_id: _FolderID gid: _GroupID - traversal_parent_id: _FolderID | None - original_parent_id: _FolderID | None + traversal_parent_id: _FolderID | None = None + original_parent_id: _FolderID | None = None read: bool write: bool delete: bool level: int - - class Config: - orm_mode = True + model_config = ConfigDict(from_attributes=True) async def _get_resolved_access_rights( @@ -529,7 +530,7 @@ async def folder_create( RootFolderRequiresAtLeastOnePrimaryGroupError """ try: - parse_obj_as(FolderName, name) + TypeAdapter(FolderName).validate_python(name) except ValidationError as exc: raise InvalidFolderNameError(name=name, reason=f"{exc}") from exc diff --git 
a/packages/postgres-database/src/simcore_postgres_database/utils_projects_metadata.py b/packages/postgres-database/src/simcore_postgres_database/utils_projects_metadata.py index 39749b7fdbf..480cae0fb19 100644 --- a/packages/postgres-database/src/simcore_postgres_database/utils_projects_metadata.py +++ b/packages/postgres-database/src/simcore_postgres_database/utils_projects_metadata.py @@ -5,7 +5,7 @@ import sqlalchemy as sa from aiopg.sa.connection import SAConnection from aiopg.sa.result import ResultProxy, RowProxy -from pydantic import BaseModel +from pydantic import BaseModel, ConfigDict from pydantic.errors import PydanticErrorMixin from sqlalchemy.dialects.postgresql import insert as pg_insert @@ -46,17 +46,14 @@ class DBProjectInvalidParentNodeError(BaseProjectsMetadataError): class ProjectMetadata(BaseModel): - custom: dict[str, Any] | None - created: datetime.datetime | None - modified: datetime.datetime | None - parent_project_uuid: uuid.UUID | None - parent_node_id: uuid.UUID | None - root_parent_project_uuid: uuid.UUID | None - root_parent_node_id: uuid.UUID | None - - class Config: - frozen = True - orm_mode = True + custom: dict[str, Any] | None = None + created: datetime.datetime | None = None + modified: datetime.datetime | None = None + parent_project_uuid: uuid.UUID | None = None + parent_node_id: uuid.UUID | None = None + root_parent_project_uuid: uuid.UUID | None = None + root_parent_node_id: uuid.UUID | None = None + model_config = ConfigDict(frozen=True, from_attributes=True) # diff --git a/packages/postgres-database/src/simcore_postgres_database/utils_projects_nodes.py b/packages/postgres-database/src/simcore_postgres_database/utils_projects_nodes.py index 09cb8a561f4..2ee32815626 100644 --- a/packages/postgres-database/src/simcore_postgres_database/utils_projects_nodes.py +++ b/packages/postgres-database/src/simcore_postgres_database/utils_projects_nodes.py @@ -5,7 +5,7 @@ import sqlalchemy from aiopg.sa.connection import SAConnection -from pydantic import BaseModel, Field +from pydantic import BaseModel, ConfigDict, Field from pydantic.errors import PydanticErrorMixin from sqlalchemy.dialects.postgresql import insert as pg_insert @@ -43,18 +43,16 @@ class ProjectNodeCreate(BaseModel): @classmethod def get_field_names(cls, *, exclude: set[str]) -> set[str]: - return {name for name in cls.__fields__ if name not in exclude} + return {name for name in cls.model_fields.keys() if name not in exclude} - class Config: - frozen = True + model_config = ConfigDict(frozen=True) class ProjectNode(ProjectNodeCreate): created: datetime.datetime modified: datetime.datetime - class Config(ProjectNodeCreate.Config): - orm_mode = True + model_config = ConfigDict(from_attributes=True) @dataclass(frozen=True, kw_only=True) From 9571d2f85161eb397fab06aab754937e7f725500 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 16 Sep 2024 14:00:25 +0200 Subject: [PATCH 005/280] upgrade service-library --- .../service-library/requirements/_aiohttp.txt | 4 +- .../service-library/requirements/_base.txt | 14 +++--- .../service-library/requirements/_fastapi.txt | 9 +--- .../service-library/requirements/_test.txt | 17 ------- .../service-library/requirements/_tools.txt | 10 ----- .../servicelib/aiohttp/requests_validation.py | 5 +-- .../src/servicelib/docker_utils.py | 44 ++++++++----------- .../src/servicelib/error_codes.py | 7 ++- .../servicelib/long_running_tasks/_models.py | 6 +-- .../tests/aiohttp/test_requests_validation.py | 10 ++--- 10 files changed, 39 insertions(+), 87 deletions(-) 
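Note: the service-library hunks below repeat the Pydantic v1 -> v2 conversion pattern applied throughout this series: class-based `Config` blocks become `model_config = ConfigDict(...)` (`Extra.forbid` -> `extra="forbid"`, `orm_mode` -> `from_attributes`, `allow_population_by_field_name` -> `populate_by_name`), `@validator` becomes `@field_validator(..., mode=...)`, and `ConstrainedStr` subclasses become `Annotated[str, StringConstraints(...)]`. The following is a minimal, self-contained sketch of that recurring pattern, not code from this diff; the names `ShortName` and `Example` are illustrative placeholders.

from typing import Annotated

from pydantic import BaseModel, ConfigDict, StringConstraints, TypeAdapter, field_validator

# v1 equivalent: class ShortName(ConstrainedStr): regex = re.compile(r"^[a-z]+$"); min_length = 1
ShortName = Annotated[str, StringConstraints(min_length=1, max_length=16, pattern=r"^[a-z]+$")]


class Example(BaseModel):
    # v1 equivalent: class Config: extra = Extra.forbid; orm_mode = True
    model_config = ConfigDict(extra="forbid", from_attributes=True)

    name: ShortName

    # v1 equivalent: @validator("name", pre=True)
    @field_validator("name", mode="before")
    @classmethod
    def _strip_name(cls, v: str) -> str:
        # runs before the StringConstraints on ShortName are enforced
        return v.strip()


assert Example(name=" abc ").name == "abc"
# v1 equivalent: parse_obj_as(ShortName, "abc")
assert TypeAdapter(ShortName).validate_python("abc") == "abc"

The sketch mirrors only the API renames shown in these patches; repository-specific details (aliases, json_schema_extra examples, model_validator usage) are unchanged in the hunks themselves.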
diff --git a/packages/service-library/requirements/_aiohttp.txt b/packages/service-library/requirements/_aiohttp.txt index cebbcc2dd9f..b0132a53002 100644 --- a/packages/service-library/requirements/_aiohttp.txt +++ b/packages/service-library/requirements/_aiohttp.txt @@ -14,9 +14,7 @@ aiosignal==1.3.1 aiozipkin==1.1.1 # via -r requirements/_aiohttp.in async-timeout==4.0.3 - # via - # aiohttp - # aiopg + # via aiopg attrs==24.2.0 # via # -r requirements/_aiohttp.in diff --git a/packages/service-library/requirements/_base.txt b/packages/service-library/requirements/_base.txt index 69daeedb073..0f4ac721816 100644 --- a/packages/service-library/requirements/_base.txt +++ b/packages/service-library/requirements/_base.txt @@ -18,6 +18,8 @@ aiormq==6.8.0 # via aio-pika aiosignal==1.3.1 # via aiohttp +annotated-types==0.7.0 + # via pydantic anyio==4.4.0 # via # fast-depends @@ -26,10 +28,6 @@ arrow==1.3.0 # via # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/_base.in -async-timeout==4.0.3 - # via - # aiohttp - # redis attrs==24.2.0 # via # aiohttp @@ -41,8 +39,6 @@ dnspython==2.6.1 # via email-validator email-validator==2.2.0 # via pydantic -exceptiongroup==1.2.2 - # via anyio fast-depends==2.4.8 # via faststream faststream==0.5.18 @@ -76,7 +72,7 @@ orjson==3.10.7 # -r requirements/../../../packages/models-library/requirements/_base.in pamqp==3.3.0 # via aiormq -pydantic==1.10.17 +pydantic==2.9.1 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt @@ -85,6 +81,8 @@ pydantic==1.10.17 # -r requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/_base.in # fast-depends +pydantic-core==2.23.3 + # via pydantic pygments==2.18.0 # via rich pyinstrument==4.7.2 @@ -137,9 +135,9 @@ types-python-dateutil==2.9.0.20240821 typing-extensions==4.12.2 # via # aiodebug - # anyio # faststream # pydantic + # pydantic-core # typer yarl==1.9.4 # via diff --git a/packages/service-library/requirements/_fastapi.txt b/packages/service-library/requirements/_fastapi.txt index 9a07c682d49..2f066595b42 100644 --- a/packages/service-library/requirements/_fastapi.txt +++ b/packages/service-library/requirements/_fastapi.txt @@ -11,13 +11,8 @@ certifi==2024.7.4 # httpx click==8.1.7 # via uvicorn -exceptiongroup==1.2.2 - # via anyio fastapi==0.99.1 # via - # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../requirements/constraints.txt # -r requirements/_fastapi.in # prometheus-fastapi-instrumentator h11==0.14.0 @@ -42,7 +37,7 @@ prometheus-client==0.20.0 # prometheus-fastapi-instrumentator prometheus-fastapi-instrumentator==6.1.0 # via -r requirements/_fastapi.in -pydantic==1.10.17 +pydantic==1.10.18 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt @@ -61,9 +56,7 @@ starlette==0.27.0 # fastapi typing-extensions==4.12.2 # via - # anyio # fastapi # pydantic - # uvicorn uvicorn==0.30.6 # via -r requirements/_fastapi.in diff --git a/packages/service-library/requirements/_test.txt b/packages/service-library/requirements/_test.txt index 789fdc81902..a64640db6ad 
100644 --- a/packages/service-library/requirements/_test.txt +++ b/packages/service-library/requirements/_test.txt @@ -21,11 +21,6 @@ anyio==4.4.0 # httpx asgi-lifespan==2.1.0 # via -r requirements/_test.in -async-timeout==4.0.3 - # via - # -c requirements/_aiohttp.txt - # -c requirements/_base.txt - # aiohttp attrs==24.2.0 # via # -c requirements/_aiohttp.txt @@ -52,12 +47,6 @@ coverage==7.6.1 # pytest-cov docker==7.1.0 # via -r requirements/_test.in -exceptiongroup==1.2.2 - # via - # -c requirements/_base.txt - # -c requirements/_fastapi.txt - # anyio - # pytest execnet==2.1.1 # via pytest-xdist faker==27.0.0 @@ -250,11 +239,6 @@ sqlalchemy2-stubs==0.0.2a38 # via sqlalchemy termcolor==2.4.0 # via pytest-sugar -tomli==2.0.1 - # via - # coverage - # mypy - # pytest types-aiofiles==24.1.0.20240626 # via -r requirements/_test.in types-psycopg2==2.9.21.20240819 @@ -265,7 +249,6 @@ typing-extensions==4.12.2 # via # -c requirements/_base.txt # -c requirements/_fastapi.txt - # anyio # mypy # sqlalchemy2-stubs urllib3==2.2.2 diff --git a/packages/service-library/requirements/_tools.txt b/packages/service-library/requirements/_tools.txt index 4695266d9c8..b54db6d8f5c 100644 --- a/packages/service-library/requirements/_tools.txt +++ b/packages/service-library/requirements/_tools.txt @@ -72,22 +72,12 @@ ruff==0.6.1 # via -r requirements/../../../requirements/devenv.txt setuptools==73.0.1 # via pip-tools -tomli==2.0.1 - # via - # -c requirements/_test.txt - # black - # build - # mypy - # pip-tools - # pylint tomlkit==0.13.2 # via pylint typing-extensions==4.12.2 # via # -c requirements/_base.txt # -c requirements/_test.txt - # astroid - # black # mypy virtualenv==20.26.3 # via pre-commit diff --git a/packages/service-library/src/servicelib/aiohttp/requests_validation.py b/packages/service-library/src/servicelib/aiohttp/requests_validation.py index 2fd5d0e41f0..0ec28708532 100644 --- a/packages/service-library/src/servicelib/aiohttp/requests_validation.py +++ b/packages/service-library/src/servicelib/aiohttp/requests_validation.py @@ -14,7 +14,7 @@ from aiohttp import web from models_library.utils.json_serialization import json_dumps -from pydantic import BaseModel, Extra, ValidationError, parse_obj_as +from pydantic import BaseModel, ConfigDict, ValidationError, parse_obj_as from ..mimetype_constants import MIMETYPE_APPLICATION_JSON from . 
import status @@ -31,8 +31,7 @@ class RequestParams(BaseModel): class StrictRequestParams(BaseModel): """Use a base class for context, path and query parameters""" - class Config: - extra = Extra.forbid # strict + model_config = ConfigDict(extra="forbid") @contextmanager diff --git a/packages/service-library/src/servicelib/docker_utils.py b/packages/service-library/src/servicelib/docker_utils.py index 3b3159d5916..8ad7a6c6c0c 100644 --- a/packages/service-library/src/servicelib/docker_utils.py +++ b/packages/service-library/src/servicelib/docker_utils.py @@ -11,7 +11,7 @@ from models_library.docker import DockerGenericTag from models_library.generated_models.docker_rest_api import ProgressDetail from models_library.utils.change_case import snake_to_camel -from pydantic import BaseModel, ByteSize, ValidationError, parse_obj_as +from pydantic import BaseModel, ByteSize, ConfigDict, ValidationError, parse_obj_as from settings_library.docker_registry import RegistrySettings from yarl import URL @@ -39,11 +39,9 @@ class DockerLayerSizeV2(BaseModel): media_type: str size: ByteSize digest: str - - class Config: - frozen = True - alias_generator = snake_to_camel - allow_population_by_field_name = True + model_config = ConfigDict( + frozen=True, alias_generator=snake_to_camel, populate_by_name=True + ) class DockerImageManifestsV2(BaseModel): @@ -51,12 +49,12 @@ class DockerImageManifestsV2(BaseModel): media_type: str config: DockerLayerSizeV2 layers: list[DockerLayerSizeV2] - - class Config: - keep_untouched = (cached_property,) - frozen = True - alias_generator = snake_to_camel - allow_population_by_field_name = True + model_config = ConfigDict( + ignored_types=(cached_property,), + frozen=True, + alias_generator=snake_to_camel, + populate_by_name=True, + ) @cached_property def layers_total_size(self) -> ByteSize: @@ -67,23 +65,19 @@ class DockerImageMultiArchManifestsV2(BaseModel): schema_version: Literal[2] media_type: Literal["application/vnd.oci.image.index.v1+json"] manifests: list[dict[str, Any]] - - class Config: - frozen = True - alias_generator = snake_to_camel - allow_population_by_field_name = True + model_config = ConfigDict( + frozen=True, alias_generator=snake_to_camel, populate_by_name=True + ) class _DockerPullImage(BaseModel): status: str - id: str | None - progress_detail: ProgressDetail | None - progress: str | None - - class Config: - frozen = True - alias_generator = snake_to_camel - allow_population_by_field_name = True + id: str | None = None + progress_detail: ProgressDetail | None = None + progress: str | None = None + model_config = ConfigDict( + frozen=True, alias_generator=snake_to_camel, populate_by_name=True + ) DOCKER_HUB_HOST: Final[str] = "registry-1.docker.io" diff --git a/packages/service-library/src/servicelib/error_codes.py b/packages/service-library/src/servicelib/error_codes.py index 2803e3627ab..461f3ee2964 100644 --- a/packages/service-library/src/servicelib/error_codes.py +++ b/packages/service-library/src/servicelib/error_codes.py @@ -11,8 +11,9 @@ import re from typing import TYPE_CHECKING +from pydantic import StringConstraints from pydantic.tools import parse_obj_as -from pydantic.types import constr +from typing_extensions import Annotated _LABEL = "OEC:{}" _PATTERN = r"OEC:\d+" @@ -20,7 +21,9 @@ if TYPE_CHECKING: ErrorCodeStr = str else: - ErrorCodeStr = constr(strip_whitespace=True, regex=_PATTERN) + ErrorCodeStr = Annotated[ + str, StringConstraints(strip_whitespace=True, pattern=_PATTERN) + ] def create_error_code(exception: BaseException) -> 
ErrorCodeStr: diff --git a/packages/service-library/src/servicelib/long_running_tasks/_models.py b/packages/service-library/src/servicelib/long_running_tasks/_models.py index b211ca29fdc..87fceb3b3e0 100644 --- a/packages/service-library/src/servicelib/long_running_tasks/_models.py +++ b/packages/service-library/src/servicelib/long_running_tasks/_models.py @@ -15,7 +15,7 @@ TaskResult, TaskStatus, ) -from pydantic import BaseModel, Field, PositiveFloat +from pydantic import BaseModel, ConfigDict, Field, PositiveFloat TaskName: TypeAlias = str @@ -46,9 +46,7 @@ class TrackedTask(BaseModel): "polled by the client who created it" ), ) - - class Config: - arbitrary_types_allowed = True + model_config = ConfigDict(arbitrary_types_allowed=True) class ClientConfiguration(BaseModel): diff --git a/packages/service-library/tests/aiohttp/test_requests_validation.py b/packages/service-library/tests/aiohttp/test_requests_validation.py index 08e2f07bfbe..c3320668bc9 100644 --- a/packages/service-library/tests/aiohttp/test_requests_validation.py +++ b/packages/service-library/tests/aiohttp/test_requests_validation.py @@ -11,7 +11,7 @@ from aiohttp.test_utils import TestClient from faker import Faker from models_library.utils.json_serialization import json_dumps -from pydantic import BaseModel, Extra, Field +from pydantic import BaseModel, ConfigDict, Field from servicelib.aiohttp import status from servicelib.aiohttp.requests_validation import ( parse_request_body_as, @@ -41,9 +41,7 @@ def create_fake(cls, faker: Faker): class MyRequestPathParams(BaseModel): project_uuid: UUID - - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") @classmethod def create_fake(cls, faker: Faker): @@ -66,9 +64,7 @@ def create_fake(cls, faker: Faker): class MyRequestHeadersParams(BaseModel): user_agent: str = Field(alias="X-Simcore-User-Agent") optional_header: str | None = Field(default=None, alias="X-Simcore-Optional-Header") - - class Config: - allow_population_by_field_name = False + model_config = ConfigDict(populate_by_name=False) @classmethod def create_fake(cls, faker: Faker): From bff67e6041c3f8064656daab077ba8ca2685f8a5 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 16 Sep 2024 14:10:37 +0200 Subject: [PATCH 006/280] upgrade simcore-sdk --- packages/simcore-sdk/requirements/_base.txt | 12 +- packages/simcore-sdk/requirements/_test.txt | 24 +- packages/simcore-sdk/requirements/_tools.txt | 10 - .../node_ports_common/r_clone_utils.py | 6 +- .../src/simcore_sdk/node_ports_v2/links.py | 15 +- .../simcore_sdk/node_ports_v2/nodeports_v2.py | 8 +- .../src/simcore_sdk/node_ports_v2/port.py | 18 +- .../test_node_ports_common_filemanager.py | 1398 ++++++++--------- .../unit/test_node_ports_v2_port_mapping.py | 19 +- .../test_node_ports_v2_port_validation.py | 5 +- 10 files changed, 756 insertions(+), 759 deletions(-) diff --git a/packages/simcore-sdk/requirements/_base.txt b/packages/simcore-sdk/requirements/_base.txt index 14712a97fc2..7d46fa1bcc9 100644 --- a/packages/simcore-sdk/requirements/_base.txt +++ b/packages/simcore-sdk/requirements/_base.txt @@ -31,6 +31,8 @@ aiosignal==1.3.1 # via aiohttp alembic==1.13.2 # via -r requirements/../../../packages/postgres-database/requirements/_base.in +annotated-types==0.7.0 + # via pydantic anyio==4.4.0 # via # fast-depends @@ -44,10 +46,8 @@ arrow==1.3.0 # -r requirements/../../../packages/service-library/requirements/_base.in async-timeout==4.0.3 # via - # aiohttp # aiopg # asyncpg - # redis asyncpg==0.29.0 # via sqlalchemy 
attrs==24.2.0 @@ -61,8 +61,6 @@ dnspython==2.6.1 # via email-validator email-validator==2.2.0 # via pydantic -exceptiongroup==1.2.2 - # via anyio fast-depends==2.4.8 # via faststream faststream==0.5.18 @@ -129,7 +127,7 @@ psycopg2-binary==2.9.9 # via # aiopg # sqlalchemy -pydantic==1.10.17 +pydantic==2.9.1 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt @@ -146,6 +144,8 @@ pydantic==1.10.17 # -r requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/_base.in # fast-depends +pydantic-core==2.23.3 + # via pydantic pygments==2.18.0 # via rich pyinstrument==4.7.2 @@ -225,12 +225,12 @@ typing-extensions==4.12.2 # via # aiodebug # alembic - # anyio # faststream # flexcache # flexparser # pint # pydantic + # pydantic-core # typer yarl==1.9.4 # via diff --git a/packages/simcore-sdk/requirements/_test.txt b/packages/simcore-sdk/requirements/_test.txt index db22f856cee..6a6b2d8eea7 100644 --- a/packages/simcore-sdk/requirements/_test.txt +++ b/packages/simcore-sdk/requirements/_test.txt @@ -28,12 +28,12 @@ alembic==1.13.2 # via # -c requirements/_base.txt # -r requirements/_test.in -antlr4-python3-runtime==4.13.2 - # via moto -async-timeout==4.0.3 +annotated-types==0.7.0 # via # -c requirements/_base.txt - # aiohttp + # pydantic +antlr4-python3-runtime==4.13.2 + # via moto attrs==24.2.0 # via # -c requirements/_base.txt @@ -88,10 +88,6 @@ docker==7.1.0 # via # -r requirements/_test.in # moto -exceptiongroup==1.2.2 - # via - # -c requirements/_base.txt - # pytest execnet==2.1.1 # via pytest-xdist faker==27.0.0 @@ -207,11 +203,15 @@ py-partiql-parser==0.5.5 # via moto pycparser==2.22 # via cffi -pydantic==1.10.17 +pydantic==2.9.1 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # aws-sam-translator +pydantic-core==2.23.3 + # via + # -c requirements/_base.txt + # pydantic pyparsing==3.1.2 # via moto pytest==8.3.2 @@ -304,11 +304,6 @@ sympy==1.13.2 # via cfn-lint termcolor==2.4.0 # via pytest-sugar -tomli==2.0.1 - # via - # coverage - # mypy - # pytest types-aiobotocore==2.13.2 # via -r requirements/_test.in types-aiobotocore-s3==2.13.2 @@ -327,6 +322,7 @@ typing-extensions==4.12.2 # cfn-lint # mypy # pydantic + # pydantic-core # sqlalchemy2-stubs # types-aiobotocore # types-aiobotocore-s3 diff --git a/packages/simcore-sdk/requirements/_tools.txt b/packages/simcore-sdk/requirements/_tools.txt index 8ca413e037f..5a573bd4848 100644 --- a/packages/simcore-sdk/requirements/_tools.txt +++ b/packages/simcore-sdk/requirements/_tools.txt @@ -76,22 +76,12 @@ setuptools==73.0.1 # via # -c requirements/_test.txt # pip-tools -tomli==2.0.1 - # via - # -c requirements/_test.txt - # black - # build - # mypy - # pip-tools - # pylint tomlkit==0.13.2 # via pylint typing-extensions==4.12.2 # via # -c requirements/_base.txt # -c requirements/_test.txt - # astroid - # black # mypy virtualenv==20.26.3 # via pre-commit diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone_utils.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone_utils.py index f539e451026..0a059c8e5ff 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone_utils.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone_utils.py @@ -3,7 +3,7 @@ from typing import Union from models_library.utils.change_case import snake_to_camel -from pydantic import 
BaseModel, ByteSize, Field, parse_raw_as +from pydantic import BaseModel, ByteSize, ConfigDict, Field, parse_raw_as from servicelib.logging_utils import log_catch from servicelib.progress_bar import ProgressBarData @@ -31,9 +31,7 @@ class _RCloneSyncTransferCompletedMessage(_RCloneSyncMessageBase): class _RCloneSyncTransferringStats(BaseModel): bytes: ByteSize total_bytes: ByteSize - - class Config: - alias_generator = snake_to_camel + model_config = ConfigDict(alias_generator=snake_to_camel) class _RCloneSyncTransferringMessage(_RCloneSyncMessageBase): diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/links.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/links.py index d8eb1d99349..1ec86b7e8db 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/links.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/links.py @@ -4,18 +4,25 @@ from models_library.basic_regex import UUID_RE from models_library.projects_nodes_io import BaseFileLink, DownloadLink from models_library.projects_nodes_io import PortLink as BasePortLink -from pydantic import AnyUrl, Extra, Field, StrictBool, StrictFloat, StrictInt, StrictStr +from pydantic import ( + AnyUrl, + ConfigDict, + Field, + StrictBool, + StrictFloat, + StrictInt, + StrictStr, +) class PortLink(BasePortLink): - node_uuid: str = Field(..., regex=UUID_RE, alias="nodeUuid") # type: ignore[assignment] # This overrides the base class it is ugly but needs its own PR to fix it + node_uuid: str = Field(..., pattern=UUID_RE, alias="nodeUuid") # type: ignore[assignment] # This overrides the base class it is ugly but needs its own PR to fix it class FileLink(BaseFileLink): """allow all kind of file links""" - class Config: - extra = Extra.allow + model_config = ConfigDict(extra="allow") # TODO: needs to be in sync with project_nodes.InputTypes and project_nodes.OutputTypes diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py index 8c78e28a066..02a6fd0a0a8 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py @@ -9,7 +9,7 @@ from models_library.projects_nodes_io import NodeIDStr from models_library.services_types import ServicePortKey from models_library.users import UserID -from pydantic import BaseModel, Field, ValidationError +from pydantic import BaseModel, ConfigDict, Field, ValidationError from pydantic.error_wrappers import flatten_errors from servicelib.progress_bar import ProgressBarData from servicelib.utils import logged_gather @@ -45,11 +45,9 @@ class Nodeports(BaseModel): ] auto_update: bool = False r_clone_settings: RCloneSettings | None = None - io_log_redirect_cb: LogRedirectCB | None + io_log_redirect_cb: LogRedirectCB | None = None aws_s3_cli_settings: AwsS3CliSettings | None = None - - class Config: - arbitrary_types_allowed = True + model_config = ConfigDict(arbitrary_types_allowed=True) def __init__(self, **data: Any): super().__init__(**data) diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py index 2338563dcdb..762e6b4e383 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py @@ -10,7 +10,14 @@ from models_library.basic_types import IDStr from models_library.services_io import BaseServiceIOModel from models_library.services_types import 
ServicePortKey -from pydantic import AnyUrl, Field, PrivateAttr, ValidationError, validator +from pydantic import ( + AnyUrl, + ConfigDict, + Field, + PrivateAttr, + ValidationError, + field_validator, +) from pydantic.tools import parse_obj_as from servicelib.progress_bar import ProgressBarData @@ -72,7 +79,7 @@ class Port(BaseServiceIOModel): widget: dict[str, Any] | None = None default_value: DataItemValue | None = Field(None, alias="defaultValue") - value: DataItemValue | None = None + value: DataItemValue | None = Field(None, validate_default=True) # Different states of "value" # - e.g. typically after resolving a port's link, a download link, ... @@ -90,10 +97,9 @@ class Port(BaseServiceIOModel): # flags _used_default_value: bool = PrivateAttr(False) - class Config(BaseServiceIOModel.Config): - validate_assignment = True + model_configg = ConfigDict(validate_assignment=True) - @validator("value", always=True) + @field_validator("value") @classmethod def check_value(cls, v: DataItemValue, values: dict[str, Any]) -> DataItemValue: if ( @@ -119,7 +125,7 @@ def check_value(cls, v: DataItemValue, values: dict[str, Any]) -> DataItemValue: ) return v - @validator("value_item", "value_concrete", pre=True) + @field_validator("value_item", "value_concrete", mode="before") @classmethod def check_item_or_concrete_value(cls, v, values): if ( diff --git a/packages/simcore-sdk/tests/integration/test_node_ports_common_filemanager.py b/packages/simcore-sdk/tests/integration/test_node_ports_common_filemanager.py index 9cd1ce32de4..81b7affc252 100644 --- a/packages/simcore-sdk/tests/integration/test_node_ports_common_filemanager.py +++ b/packages/simcore-sdk/tests/integration/test_node_ports_common_filemanager.py @@ -1,699 +1,699 @@ -# pylint:disable=unused-variable -# pylint:disable=unused-argument -# pylint:disable=redefined-outer-name -# pylint:disable=too-many-arguments -# pylint:disable=protected-access - -import filecmp -from collections.abc import Awaitable, Callable -from pathlib import Path -from typing import Any -from uuid import uuid4 - -import pytest -from aiohttp import ClientError -from faker import Faker -from models_library.projects_nodes_io import ( - LocationID, - SimcoreS3DirectoryID, - SimcoreS3FileID, -) -from models_library.users import UserID -from pydantic import BaseModel, ByteSize, parse_obj_as -from pytest_mock import MockerFixture -from pytest_simcore.helpers.parametrizations import byte_size_ids -from servicelib.progress_bar import ProgressBarData -from settings_library.aws_s3_cli import AwsS3CliSettings -from settings_library.r_clone import RCloneSettings -from simcore_sdk.node_ports_common import exceptions, filemanager -from simcore_sdk.node_ports_common.aws_s3_cli import AwsS3CliFailedError -from simcore_sdk.node_ports_common.filemanager import UploadedFile, UploadedFolder -from simcore_sdk.node_ports_common.r_clone import RCloneFailedError -from yarl import URL - -pytest_simcore_core_services_selection = [ - "migration", - "postgres", - "storage", - "redis", -] - -pytest_simcore_ops_services_selection = ["minio", "adminer"] - - -class _SyncSettings(BaseModel): - r_clone_settings: RCloneSettings | None - aws_s3_cli_settings: AwsS3CliSettings | None - - -@pytest.fixture( - params=[(True, False), (False, True), (False, False)], - ids=[ - "RClone enabled", - "AwsS3Cli enabled", - "Both RClone and AwsS3Cli disabled", - ], -) -def optional_sync_settings( - r_clone_settings: RCloneSettings, - aws_s3_cli_settings: AwsS3CliSettings, - request: pytest.FixtureRequest, -) -> 
_SyncSettings: - _rclone_enabled, _aws_s3_cli_enabled = request.param - - _r_clone_settings = r_clone_settings if _rclone_enabled else None - _aws_s3_cli_settings = aws_s3_cli_settings if _aws_s3_cli_enabled else None - - return _SyncSettings( - r_clone_settings=_r_clone_settings, aws_s3_cli_settings=_aws_s3_cli_settings - ) - - -def _file_size(size_str: str, **pytest_params): - return pytest.param(parse_obj_as(ByteSize, size_str), id=size_str, **pytest_params) - - -@pytest.mark.parametrize( - "file_size", - [ - _file_size("10Mib"), - _file_size("103Mib"), - _file_size("1003Mib", marks=pytest.mark.heavy_load), - _file_size("7Gib", marks=pytest.mark.heavy_load), - ], - ids=byte_size_ids, -) -async def test_valid_upload_download( - node_ports_config: None, - tmpdir: Path, - user_id: int, - create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], - s3_simcore_location: LocationID, - file_size: ByteSize, - create_file_of_size: Callable[[ByteSize, str], Path], - optional_sync_settings: _SyncSettings, - simcore_services_ready: None, - storage_service: URL, - faker: Faker, -): - file_path = create_file_of_size(file_size, "test.test") - - file_id = create_valid_file_uuid("", file_path) - async with ProgressBarData(num_steps=2, description=faker.pystr()) as progress_bar: - upload_result: UploadedFolder | UploadedFile = await filemanager.upload_path( - user_id=user_id, - store_id=s3_simcore_location, - store_name=None, - s3_object=file_id, - path_to_upload=file_path, - r_clone_settings=optional_sync_settings.r_clone_settings, - io_log_redirect_cb=None, - progress_bar=progress_bar, - aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, - ) - assert isinstance(upload_result, UploadedFile) - store_id, e_tag = upload_result.store_id, upload_result.etag - # pylint: disable=protected-access - assert progress_bar._current_steps == pytest.approx(1) # noqa: SLF001 - assert store_id == s3_simcore_location - assert e_tag - file_metadata = await filemanager.get_file_metadata( - user_id=user_id, store_id=store_id, s3_object=file_id - ) - assert file_metadata.location == store_id - assert file_metadata.etag == e_tag - - download_folder = Path(tmpdir) / "downloads" - download_file_path = await filemanager.download_path_from_s3( - user_id=user_id, - store_id=s3_simcore_location, - store_name=None, - s3_object=file_id, - local_path=download_folder, - io_log_redirect_cb=None, - r_clone_settings=optional_sync_settings.r_clone_settings, - progress_bar=progress_bar, - aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, - ) - assert progress_bar._current_steps == pytest.approx(2) # noqa: SLF001 - assert download_file_path.exists() - assert download_file_path.name == "test.test" - assert filecmp.cmp(download_file_path, file_path) - - -@pytest.mark.parametrize( - "file_size", - [ - _file_size("10Mib"), - _file_size("103Mib"), - ], - ids=byte_size_ids, -) -async def test_valid_upload_download_using_file_object( - node_ports_config: None, - tmpdir: Path, - user_id: int, - create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], - s3_simcore_location: LocationID, - file_size: ByteSize, - create_file_of_size: Callable[[ByteSize, str], Path], - optional_sync_settings: _SyncSettings, - faker: Faker, -): - file_path = create_file_of_size(file_size, "test.test") - - file_id = create_valid_file_uuid("", file_path) - with file_path.open("rb") as file_object: - upload_result: UploadedFolder | UploadedFile = await filemanager.upload_path( - user_id=user_id, - store_id=s3_simcore_location, - 
store_name=None, - s3_object=file_id, - path_to_upload=filemanager.UploadableFileObject( - file_object, file_path.name, file_path.stat().st_size - ), - r_clone_settings=optional_sync_settings.r_clone_settings, - io_log_redirect_cb=None, - aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, - ) - assert isinstance(upload_result, UploadedFile) - store_id, e_tag = upload_result.store_id, upload_result.etag - assert store_id == s3_simcore_location - assert e_tag - file_metadata = await filemanager.get_file_metadata( - user_id=user_id, store_id=store_id, s3_object=file_id - ) - assert file_metadata.location == store_id - assert file_metadata.etag == e_tag - - download_folder = Path(tmpdir) / "downloads" - async with ProgressBarData(num_steps=1, description=faker.pystr()) as progress_bar: - download_file_path = await filemanager.download_path_from_s3( - user_id=user_id, - store_id=s3_simcore_location, - store_name=None, - s3_object=file_id, - local_path=download_folder, - io_log_redirect_cb=None, - r_clone_settings=optional_sync_settings.r_clone_settings, - progress_bar=progress_bar, - aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, - ) - assert progress_bar._current_steps == pytest.approx(1) # noqa: SLF001 - assert download_file_path.exists() - assert download_file_path.name == "test.test" - assert filecmp.cmp(download_file_path, file_path) - - -@pytest.fixture -def mocked_upload_file_raising_exceptions(mocker: MockerFixture) -> None: - mocker.patch( - "simcore_sdk.node_ports_common.filemanager.r_clone.sync_local_to_s3", - autospec=True, - side_effect=RCloneFailedError, - ) - mocker.patch( - "simcore_sdk.node_ports_common.file_io_utils._upload_file_part", - autospec=True, - side_effect=ClientError, - ) - mocker.patch( - "simcore_sdk.node_ports_common.filemanager.aws_s3_cli.sync_local_to_s3", - autospec=True, - side_effect=AwsS3CliFailedError, - ) - - -@pytest.mark.parametrize( - "file_size", - [ - _file_size("10Mib"), - ], - ids=byte_size_ids, -) -async def test_failed_upload_is_properly_removed_from_storage( - node_ports_config: None, - create_file_of_size: Callable[[ByteSize], Path], - create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], - s3_simcore_location: LocationID, - optional_sync_settings: _SyncSettings, - file_size: ByteSize, - user_id: UserID, - mocked_upload_file_raising_exceptions: None, -): - file_path = create_file_of_size(file_size) - file_id = create_valid_file_uuid("", file_path) - with pytest.raises(exceptions.S3TransferError): - await filemanager.upload_path( - user_id=user_id, - store_id=s3_simcore_location, - store_name=None, - s3_object=file_id, - path_to_upload=file_path, - r_clone_settings=optional_sync_settings.r_clone_settings, - io_log_redirect_cb=None, - aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, - ) - with pytest.raises(exceptions.S3InvalidPathError): - await filemanager.get_file_metadata( - user_id=user_id, store_id=s3_simcore_location, s3_object=file_id - ) - - -@pytest.mark.parametrize( - "file_size", - [ - _file_size("10Mib"), - ], - ids=byte_size_ids, -) -async def test_failed_upload_after_valid_upload_keeps_last_valid_state( - node_ports_config: None, - create_file_of_size: Callable[[ByteSize], Path], - create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], - s3_simcore_location: LocationID, - optional_sync_settings: _SyncSettings, - file_size: ByteSize, - user_id: UserID, - mocker: MockerFixture, -): - # upload a valid file - file_path = create_file_of_size(file_size) - file_id = 
create_valid_file_uuid("", file_path) - upload_result: UploadedFolder | UploadedFile = await filemanager.upload_path( - user_id=user_id, - store_id=s3_simcore_location, - store_name=None, - s3_object=file_id, - path_to_upload=file_path, - r_clone_settings=optional_sync_settings.r_clone_settings, - io_log_redirect_cb=None, - aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, - ) - assert isinstance(upload_result, UploadedFile) - store_id, e_tag = upload_result.store_id, upload_result.etag - assert store_id == s3_simcore_location - assert e_tag - # check the file is correctly uploaded - file_metadata = await filemanager.get_file_metadata( - user_id=user_id, store_id=store_id, s3_object=file_id - ) - assert file_metadata.location == store_id - assert file_metadata.etag == e_tag - # now start an invalid update by generating an exception while uploading the same file - mocker.patch( - "simcore_sdk.node_ports_common.filemanager.r_clone.sync_local_to_s3", - autospec=True, - side_effect=RCloneFailedError, - ) - mocker.patch( - "simcore_sdk.node_ports_common.file_io_utils._upload_file_part", - autospec=True, - side_effect=ClientError, - ) - with pytest.raises(exceptions.S3TransferError): - await filemanager.upload_path( - user_id=user_id, - store_id=s3_simcore_location, - store_name=None, - s3_object=file_id, - path_to_upload=file_path, - r_clone_settings=optional_sync_settings.r_clone_settings, - io_log_redirect_cb=None, - aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, - ) - # the file shall be back to its original state - file_metadata = await filemanager.get_file_metadata( - user_id=user_id, store_id=s3_simcore_location, s3_object=file_id - ) - assert file_metadata.location == store_id - assert file_metadata.etag == e_tag - - -async def test_invalid_file_path( - node_ports_config: None, - tmpdir: Path, - user_id: int, - create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], - s3_simcore_location: LocationID, - optional_sync_settings: _SyncSettings, - faker: Faker, -): - file_path = Path(tmpdir) / "test.test" - file_path.write_text("I am a test file") - assert file_path.exists() - - file_id = create_valid_file_uuid("", file_path) - store = s3_simcore_location - with pytest.raises(FileNotFoundError): - await filemanager.upload_path( - user_id=user_id, - store_id=store, - store_name=None, - s3_object=file_id, - path_to_upload=Path(tmpdir) / "some other file.txt", - io_log_redirect_cb=None, - ) - - download_folder = Path(tmpdir) / "downloads" - with pytest.raises(exceptions.S3InvalidPathError): # noqa: PT012 - async with ProgressBarData( - num_steps=1, description=faker.pystr() - ) as progress_bar: - await filemanager.download_path_from_s3( - user_id=user_id, - store_id=store, - store_name=None, - s3_object=file_id, - local_path=download_folder, - io_log_redirect_cb=None, - r_clone_settings=optional_sync_settings.r_clone_settings, - progress_bar=progress_bar, - aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, - ) - - -async def test_errors_upon_invalid_file_identifiers( - node_ports_config: None, - tmpdir: Path, - user_id: UserID, - project_id: str, - s3_simcore_location: LocationID, - optional_sync_settings: _SyncSettings, - faker: Faker, -): - file_path = Path(tmpdir) / "test.test" - file_path.write_text("I am a test file") - assert file_path.exists() - - store = s3_simcore_location - with pytest.raises(exceptions.S3InvalidPathError): # noqa: PT012 - invalid_s3_path = SimcoreS3FileID("") - await filemanager.upload_path( - user_id=user_id, - 
store_id=store, - store_name=None, - s3_object=invalid_s3_path, - path_to_upload=file_path, - io_log_redirect_cb=None, - ) - - with pytest.raises(exceptions.StorageInvalidCall): # noqa: PT012 - invalid_file_id = SimcoreS3FileID("file_id") - await filemanager.upload_path( - user_id=user_id, - store_id=store, - store_name=None, - s3_object=invalid_file_id, - path_to_upload=file_path, - io_log_redirect_cb=None, - ) - - download_folder = Path(tmpdir) / "downloads" - with pytest.raises(exceptions.S3InvalidPathError): # noqa: PT012 - async with ProgressBarData( - num_steps=1, description=faker.pystr() - ) as progress_bar: - invalid_s3_path = SimcoreS3FileID("") - await filemanager.download_path_from_s3( - user_id=user_id, - store_id=store, - store_name=None, - s3_object=invalid_s3_path, - local_path=download_folder, - io_log_redirect_cb=None, - r_clone_settings=optional_sync_settings.r_clone_settings, - progress_bar=progress_bar, - aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, - ) - - with pytest.raises(exceptions.S3InvalidPathError): # noqa: PT012 - async with ProgressBarData( - num_steps=1, description=faker.pystr() - ) as progress_bar: - await filemanager.download_path_from_s3( - user_id=user_id, - store_id=store, - store_name=None, - s3_object=SimcoreS3FileID(f"{project_id}/{uuid4()}/invisible.txt"), - local_path=download_folder, - io_log_redirect_cb=None, - r_clone_settings=optional_sync_settings.r_clone_settings, - progress_bar=progress_bar, - aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, - ) - - -async def test_invalid_store( - node_ports_config: None, - tmpdir: Path, - user_id: int, - create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], - optional_sync_settings: _SyncSettings, - faker: Faker, -): - file_path = Path(tmpdir) / "test.test" - file_path.write_text("I am a test file") - assert file_path.exists() - - file_id = create_valid_file_uuid("", file_path) - store = "somefunkystore" - with pytest.raises(exceptions.S3InvalidStore): - await filemanager.upload_path( - user_id=user_id, - store_id=None, - store_name=store, # type: ignore - s3_object=file_id, - path_to_upload=file_path, - io_log_redirect_cb=None, - ) - - download_folder = Path(tmpdir) / "downloads" - with pytest.raises(exceptions.S3InvalidStore): # noqa: PT012 - async with ProgressBarData( - num_steps=1, description=faker.pystr() - ) as progress_bar: - await filemanager.download_path_from_s3( - user_id=user_id, - store_id=None, - store_name=store, # type: ignore - s3_object=file_id, - local_path=download_folder, - io_log_redirect_cb=None, - r_clone_settings=optional_sync_settings.r_clone_settings, - progress_bar=progress_bar, - aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, - ) - - -@pytest.fixture( - params=[True, False], - ids=["with RClone", "with AwsS3Cli"], -) -def sync_settings( - r_clone_settings: RCloneSettings, - aws_s3_cli_settings: AwsS3CliSettings, - request: pytest.FixtureRequest, -) -> _SyncSettings: - is_rclone_enabled = request.param - - return _SyncSettings( - r_clone_settings=r_clone_settings if is_rclone_enabled else None, - aws_s3_cli_settings=aws_s3_cli_settings if not is_rclone_enabled else None, - ) - - -@pytest.mark.parametrize("is_directory", [False, True]) -async def test_valid_metadata( - node_ports_config: None, - tmpdir: Path, - user_id: int, - create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], - s3_simcore_location: LocationID, - sync_settings: _SyncSettings, - is_directory: bool, -): - # first we go with a non-existing 
file - file_path = Path(tmpdir) / "a-subdir" / "test.test" - file_path.parent.mkdir(parents=True, exist_ok=True) - - path_to_upload = file_path.parent if is_directory else file_path - - file_id = create_valid_file_uuid("", path_to_upload) - assert file_path.exists() is False - - is_metadata_present = await filemanager.entry_exists( - user_id=user_id, - store_id=s3_simcore_location, - s3_object=file_id, - is_directory=is_directory, - ) - assert is_metadata_present is False - - # now really create the file and upload it - file_path.write_text("I am a test file") - assert file_path.exists() - - file_id = create_valid_file_uuid("", path_to_upload) - upload_result: UploadedFolder | UploadedFile = await filemanager.upload_path( - user_id=user_id, - store_id=s3_simcore_location, - store_name=None, - s3_object=file_id, - path_to_upload=path_to_upload, - io_log_redirect_cb=None, - r_clone_settings=sync_settings.r_clone_settings, - aws_s3_cli_settings=sync_settings.aws_s3_cli_settings, - ) - if is_directory: - assert isinstance(upload_result, UploadedFolder) - else: - assert isinstance(upload_result, UploadedFile) - assert upload_result.store_id == s3_simcore_location - assert upload_result.etag - - is_metadata_present = await filemanager.entry_exists( - user_id=user_id, - store_id=s3_simcore_location, - s3_object=file_id, - is_directory=is_directory, - ) - - assert is_metadata_present is True - - -@pytest.mark.parametrize( - "fct, extra_kwargs", - [ - (filemanager.entry_exists, {"is_directory": False}), - (filemanager.delete_file, {}), - (filemanager.get_file_metadata, {}), - ], -) -async def test_invalid_call_raises_exception( - node_ports_config: None, - tmpdir: Path, - user_id: int, - create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], - s3_simcore_location: LocationID, - fct: Callable[[int, str, str, Any | None], Awaitable], - extra_kwargs: dict[str, Any], -): - file_path = Path(tmpdir) / "test.test" - file_id = create_valid_file_uuid("", file_path) - assert file_path.exists() is False - - with pytest.raises(exceptions.StorageInvalidCall): - await fct( - user_id=None, store_id=s3_simcore_location, s3_object=file_id, **extra_kwargs # type: ignore - ) - with pytest.raises(exceptions.StorageInvalidCall): - await fct(user_id=user_id, store_id=None, s3_object=file_id, **extra_kwargs) # type: ignore - with pytest.raises(exceptions.StorageInvalidCall): - await fct( - user_id=user_id, store_id=s3_simcore_location, s3_object="bing", **extra_kwargs # type: ignore - ) - - -async def test_delete_file( - node_ports_config: None, - tmpdir: Path, - user_id: int, - create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], - s3_simcore_location: LocationID, - storage_service: URL, -): - file_path = Path(tmpdir) / "test.test" - file_path.write_text("I am a test file") - assert file_path.exists() - - file_id = create_valid_file_uuid("", file_path) - upload_result: UploadedFolder | UploadedFile = await filemanager.upload_path( - user_id=user_id, - store_id=s3_simcore_location, - store_name=None, - s3_object=file_id, - path_to_upload=file_path, - io_log_redirect_cb=None, - ) - assert isinstance(upload_result, UploadedFile) - store_id, e_tag = upload_result.store_id, upload_result.etag - assert store_id == s3_simcore_location - assert e_tag - - is_metadata_present = await filemanager.entry_exists( - user_id=user_id, store_id=store_id, s3_object=file_id, is_directory=False - ) - assert is_metadata_present is True - - await filemanager.delete_file( - user_id=user_id, store_id=s3_simcore_location, 
s3_object=file_id - ) - - # check that it disappeared - assert ( - await filemanager.entry_exists( - user_id=user_id, store_id=store_id, s3_object=file_id, is_directory=False - ) - is False - ) - - -@pytest.mark.parametrize("files_in_folder", [1, 10]) -async def test_upload_path_source_is_a_folder( - node_ports_config: None, - project_id: str, - tmp_path: Path, - faker: Faker, - user_id: int, - s3_simcore_location: LocationID, - files_in_folder: int, - sync_settings: _SyncSettings, -): - source_dir = tmp_path / f"source-{faker.uuid4()}" - source_dir.mkdir(parents=True, exist_ok=True) - - download_dir = tmp_path / f"download-{faker.uuid4()}" - download_dir.mkdir(parents=True, exist_ok=True) - - for i in range(files_in_folder): - (source_dir / f"file-{i}.txt").write_text("1") - - directory_id = SimcoreS3DirectoryID.from_simcore_s3_object( - f"{project_id}/{faker.uuid4()}/some-dir-in-node-root/" - ) - s3_object = SimcoreS3FileID(directory_id) - - upload_result: UploadedFolder | UploadedFile = await filemanager.upload_path( - user_id=user_id, - store_id=s3_simcore_location, - store_name=None, - s3_object=s3_object, - path_to_upload=source_dir, - io_log_redirect_cb=None, - r_clone_settings=sync_settings.r_clone_settings, - aws_s3_cli_settings=sync_settings.aws_s3_cli_settings, - ) - assert isinstance(upload_result, UploadedFolder) - assert source_dir.exists() - - async with ProgressBarData(num_steps=1, description=faker.pystr()) as progress_bar: - await filemanager.download_path_from_s3( - user_id=user_id, - store_name=None, - store_id=s3_simcore_location, - s3_object=s3_object, - local_path=download_dir, - io_log_redirect_cb=None, - r_clone_settings=sync_settings.r_clone_settings, - progress_bar=progress_bar, - aws_s3_cli_settings=sync_settings.aws_s3_cli_settings, - ) - assert download_dir.exists() - - # ensure all files in download and source directory are the same - file_names: set = {f.name for f in source_dir.glob("*")} & { - f.name for f in download_dir.glob("*") - } - for file_name in file_names: - filecmp.cmp(source_dir / file_name, download_dir / file_name, shallow=False) +# pylint:disable=unused-variable +# pylint:disable=unused-argument +# pylint:disable=redefined-outer-name +# pylint:disable=too-many-arguments +# pylint:disable=protected-access + +import filecmp +from collections.abc import Awaitable, Callable +from pathlib import Path +from typing import Any +from uuid import uuid4 + +import pytest +from aiohttp import ClientError +from faker import Faker +from models_library.projects_nodes_io import ( + LocationID, + SimcoreS3DirectoryID, + SimcoreS3FileID, +) +from models_library.users import UserID +from pydantic import BaseModel, ByteSize, parse_obj_as +from pytest_mock import MockerFixture +from pytest_simcore.helpers.parametrizations import byte_size_ids +from servicelib.progress_bar import ProgressBarData +from settings_library.aws_s3_cli import AwsS3CliSettings +from settings_library.r_clone import RCloneSettings +from simcore_sdk.node_ports_common import exceptions, filemanager +from simcore_sdk.node_ports_common.aws_s3_cli import AwsS3CliFailedError +from simcore_sdk.node_ports_common.filemanager import UploadedFile, UploadedFolder +from simcore_sdk.node_ports_common.r_clone import RCloneFailedError +from yarl import URL + +pytest_simcore_core_services_selection = [ + "migration", + "postgres", + "storage", + "redis", +] + +pytest_simcore_ops_services_selection = ["minio", "adminer"] + + +class _SyncSettings(BaseModel): + r_clone_settings: RCloneSettings | None = None + 
aws_s3_cli_settings: AwsS3CliSettings | None = None + + +@pytest.fixture( + params=[(True, False), (False, True), (False, False)], + ids=[ + "RClone enabled", + "AwsS3Cli enabled", + "Both RClone and AwsS3Cli disabled", + ], +) +def optional_sync_settings( + r_clone_settings: RCloneSettings, + aws_s3_cli_settings: AwsS3CliSettings, + request: pytest.FixtureRequest, +) -> _SyncSettings: + _rclone_enabled, _aws_s3_cli_enabled = request.param + + _r_clone_settings = r_clone_settings if _rclone_enabled else None + _aws_s3_cli_settings = aws_s3_cli_settings if _aws_s3_cli_enabled else None + + return _SyncSettings( + r_clone_settings=_r_clone_settings, aws_s3_cli_settings=_aws_s3_cli_settings + ) + + +def _file_size(size_str: str, **pytest_params): + return pytest.param(parse_obj_as(ByteSize, size_str), id=size_str, **pytest_params) + + +@pytest.mark.parametrize( + "file_size", + [ + _file_size("10Mib"), + _file_size("103Mib"), + _file_size("1003Mib", marks=pytest.mark.heavy_load), + _file_size("7Gib", marks=pytest.mark.heavy_load), + ], + ids=byte_size_ids, +) +async def test_valid_upload_download( + node_ports_config: None, + tmpdir: Path, + user_id: int, + create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], + s3_simcore_location: LocationID, + file_size: ByteSize, + create_file_of_size: Callable[[ByteSize, str], Path], + optional_sync_settings: _SyncSettings, + simcore_services_ready: None, + storage_service: URL, + faker: Faker, +): + file_path = create_file_of_size(file_size, "test.test") + + file_id = create_valid_file_uuid("", file_path) + async with ProgressBarData(num_steps=2, description=faker.pystr()) as progress_bar: + upload_result: UploadedFolder | UploadedFile = await filemanager.upload_path( + user_id=user_id, + store_id=s3_simcore_location, + store_name=None, + s3_object=file_id, + path_to_upload=file_path, + r_clone_settings=optional_sync_settings.r_clone_settings, + io_log_redirect_cb=None, + progress_bar=progress_bar, + aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, + ) + assert isinstance(upload_result, UploadedFile) + store_id, e_tag = upload_result.store_id, upload_result.etag + # pylint: disable=protected-access + assert progress_bar._current_steps == pytest.approx(1) # noqa: SLF001 + assert store_id == s3_simcore_location + assert e_tag + file_metadata = await filemanager.get_file_metadata( + user_id=user_id, store_id=store_id, s3_object=file_id + ) + assert file_metadata.location == store_id + assert file_metadata.etag == e_tag + + download_folder = Path(tmpdir) / "downloads" + download_file_path = await filemanager.download_path_from_s3( + user_id=user_id, + store_id=s3_simcore_location, + store_name=None, + s3_object=file_id, + local_path=download_folder, + io_log_redirect_cb=None, + r_clone_settings=optional_sync_settings.r_clone_settings, + progress_bar=progress_bar, + aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, + ) + assert progress_bar._current_steps == pytest.approx(2) # noqa: SLF001 + assert download_file_path.exists() + assert download_file_path.name == "test.test" + assert filecmp.cmp(download_file_path, file_path) + + +@pytest.mark.parametrize( + "file_size", + [ + _file_size("10Mib"), + _file_size("103Mib"), + ], + ids=byte_size_ids, +) +async def test_valid_upload_download_using_file_object( + node_ports_config: None, + tmpdir: Path, + user_id: int, + create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], + s3_simcore_location: LocationID, + file_size: ByteSize, + create_file_of_size: 
Callable[[ByteSize, str], Path], + optional_sync_settings: _SyncSettings, + faker: Faker, +): + file_path = create_file_of_size(file_size, "test.test") + + file_id = create_valid_file_uuid("", file_path) + with file_path.open("rb") as file_object: + upload_result: UploadedFolder | UploadedFile = await filemanager.upload_path( + user_id=user_id, + store_id=s3_simcore_location, + store_name=None, + s3_object=file_id, + path_to_upload=filemanager.UploadableFileObject( + file_object, file_path.name, file_path.stat().st_size + ), + r_clone_settings=optional_sync_settings.r_clone_settings, + io_log_redirect_cb=None, + aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, + ) + assert isinstance(upload_result, UploadedFile) + store_id, e_tag = upload_result.store_id, upload_result.etag + assert store_id == s3_simcore_location + assert e_tag + file_metadata = await filemanager.get_file_metadata( + user_id=user_id, store_id=store_id, s3_object=file_id + ) + assert file_metadata.location == store_id + assert file_metadata.etag == e_tag + + download_folder = Path(tmpdir) / "downloads" + async with ProgressBarData(num_steps=1, description=faker.pystr()) as progress_bar: + download_file_path = await filemanager.download_path_from_s3( + user_id=user_id, + store_id=s3_simcore_location, + store_name=None, + s3_object=file_id, + local_path=download_folder, + io_log_redirect_cb=None, + r_clone_settings=optional_sync_settings.r_clone_settings, + progress_bar=progress_bar, + aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, + ) + assert progress_bar._current_steps == pytest.approx(1) # noqa: SLF001 + assert download_file_path.exists() + assert download_file_path.name == "test.test" + assert filecmp.cmp(download_file_path, file_path) + + +@pytest.fixture +def mocked_upload_file_raising_exceptions(mocker: MockerFixture) -> None: + mocker.patch( + "simcore_sdk.node_ports_common.filemanager.r_clone.sync_local_to_s3", + autospec=True, + side_effect=RCloneFailedError, + ) + mocker.patch( + "simcore_sdk.node_ports_common.file_io_utils._upload_file_part", + autospec=True, + side_effect=ClientError, + ) + mocker.patch( + "simcore_sdk.node_ports_common.filemanager.aws_s3_cli.sync_local_to_s3", + autospec=True, + side_effect=AwsS3CliFailedError, + ) + + +@pytest.mark.parametrize( + "file_size", + [ + _file_size("10Mib"), + ], + ids=byte_size_ids, +) +async def test_failed_upload_is_properly_removed_from_storage( + node_ports_config: None, + create_file_of_size: Callable[[ByteSize], Path], + create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], + s3_simcore_location: LocationID, + optional_sync_settings: _SyncSettings, + file_size: ByteSize, + user_id: UserID, + mocked_upload_file_raising_exceptions: None, +): + file_path = create_file_of_size(file_size) + file_id = create_valid_file_uuid("", file_path) + with pytest.raises(exceptions.S3TransferError): + await filemanager.upload_path( + user_id=user_id, + store_id=s3_simcore_location, + store_name=None, + s3_object=file_id, + path_to_upload=file_path, + r_clone_settings=optional_sync_settings.r_clone_settings, + io_log_redirect_cb=None, + aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, + ) + with pytest.raises(exceptions.S3InvalidPathError): + await filemanager.get_file_metadata( + user_id=user_id, store_id=s3_simcore_location, s3_object=file_id + ) + + +@pytest.mark.parametrize( + "file_size", + [ + _file_size("10Mib"), + ], + ids=byte_size_ids, +) +async def test_failed_upload_after_valid_upload_keeps_last_valid_state( 
+ node_ports_config: None, + create_file_of_size: Callable[[ByteSize], Path], + create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], + s3_simcore_location: LocationID, + optional_sync_settings: _SyncSettings, + file_size: ByteSize, + user_id: UserID, + mocker: MockerFixture, +): + # upload a valid file + file_path = create_file_of_size(file_size) + file_id = create_valid_file_uuid("", file_path) + upload_result: UploadedFolder | UploadedFile = await filemanager.upload_path( + user_id=user_id, + store_id=s3_simcore_location, + store_name=None, + s3_object=file_id, + path_to_upload=file_path, + r_clone_settings=optional_sync_settings.r_clone_settings, + io_log_redirect_cb=None, + aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, + ) + assert isinstance(upload_result, UploadedFile) + store_id, e_tag = upload_result.store_id, upload_result.etag + assert store_id == s3_simcore_location + assert e_tag + # check the file is correctly uploaded + file_metadata = await filemanager.get_file_metadata( + user_id=user_id, store_id=store_id, s3_object=file_id + ) + assert file_metadata.location == store_id + assert file_metadata.etag == e_tag + # now start an invalid update by generating an exception while uploading the same file + mocker.patch( + "simcore_sdk.node_ports_common.filemanager.r_clone.sync_local_to_s3", + autospec=True, + side_effect=RCloneFailedError, + ) + mocker.patch( + "simcore_sdk.node_ports_common.file_io_utils._upload_file_part", + autospec=True, + side_effect=ClientError, + ) + with pytest.raises(exceptions.S3TransferError): + await filemanager.upload_path( + user_id=user_id, + store_id=s3_simcore_location, + store_name=None, + s3_object=file_id, + path_to_upload=file_path, + r_clone_settings=optional_sync_settings.r_clone_settings, + io_log_redirect_cb=None, + aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, + ) + # the file shall be back to its original state + file_metadata = await filemanager.get_file_metadata( + user_id=user_id, store_id=s3_simcore_location, s3_object=file_id + ) + assert file_metadata.location == store_id + assert file_metadata.etag == e_tag + + +async def test_invalid_file_path( + node_ports_config: None, + tmpdir: Path, + user_id: int, + create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], + s3_simcore_location: LocationID, + optional_sync_settings: _SyncSettings, + faker: Faker, +): + file_path = Path(tmpdir) / "test.test" + file_path.write_text("I am a test file") + assert file_path.exists() + + file_id = create_valid_file_uuid("", file_path) + store = s3_simcore_location + with pytest.raises(FileNotFoundError): + await filemanager.upload_path( + user_id=user_id, + store_id=store, + store_name=None, + s3_object=file_id, + path_to_upload=Path(tmpdir) / "some other file.txt", + io_log_redirect_cb=None, + ) + + download_folder = Path(tmpdir) / "downloads" + with pytest.raises(exceptions.S3InvalidPathError): # noqa: PT012 + async with ProgressBarData( + num_steps=1, description=faker.pystr() + ) as progress_bar: + await filemanager.download_path_from_s3( + user_id=user_id, + store_id=store, + store_name=None, + s3_object=file_id, + local_path=download_folder, + io_log_redirect_cb=None, + r_clone_settings=optional_sync_settings.r_clone_settings, + progress_bar=progress_bar, + aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, + ) + + +async def test_errors_upon_invalid_file_identifiers( + node_ports_config: None, + tmpdir: Path, + user_id: UserID, + project_id: str, + s3_simcore_location: 
LocationID, + optional_sync_settings: _SyncSettings, + faker: Faker, +): + file_path = Path(tmpdir) / "test.test" + file_path.write_text("I am a test file") + assert file_path.exists() + + store = s3_simcore_location + with pytest.raises(exceptions.S3InvalidPathError): # noqa: PT012 + invalid_s3_path = SimcoreS3FileID("") + await filemanager.upload_path( + user_id=user_id, + store_id=store, + store_name=None, + s3_object=invalid_s3_path, + path_to_upload=file_path, + io_log_redirect_cb=None, + ) + + with pytest.raises(exceptions.StorageInvalidCall): # noqa: PT012 + invalid_file_id = SimcoreS3FileID("file_id") + await filemanager.upload_path( + user_id=user_id, + store_id=store, + store_name=None, + s3_object=invalid_file_id, + path_to_upload=file_path, + io_log_redirect_cb=None, + ) + + download_folder = Path(tmpdir) / "downloads" + with pytest.raises(exceptions.S3InvalidPathError): # noqa: PT012 + async with ProgressBarData( + num_steps=1, description=faker.pystr() + ) as progress_bar: + invalid_s3_path = SimcoreS3FileID("") + await filemanager.download_path_from_s3( + user_id=user_id, + store_id=store, + store_name=None, + s3_object=invalid_s3_path, + local_path=download_folder, + io_log_redirect_cb=None, + r_clone_settings=optional_sync_settings.r_clone_settings, + progress_bar=progress_bar, + aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, + ) + + with pytest.raises(exceptions.S3InvalidPathError): # noqa: PT012 + async with ProgressBarData( + num_steps=1, description=faker.pystr() + ) as progress_bar: + await filemanager.download_path_from_s3( + user_id=user_id, + store_id=store, + store_name=None, + s3_object=SimcoreS3FileID(f"{project_id}/{uuid4()}/invisible.txt"), + local_path=download_folder, + io_log_redirect_cb=None, + r_clone_settings=optional_sync_settings.r_clone_settings, + progress_bar=progress_bar, + aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, + ) + + +async def test_invalid_store( + node_ports_config: None, + tmpdir: Path, + user_id: int, + create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], + optional_sync_settings: _SyncSettings, + faker: Faker, +): + file_path = Path(tmpdir) / "test.test" + file_path.write_text("I am a test file") + assert file_path.exists() + + file_id = create_valid_file_uuid("", file_path) + store = "somefunkystore" + with pytest.raises(exceptions.S3InvalidStore): + await filemanager.upload_path( + user_id=user_id, + store_id=None, + store_name=store, # type: ignore + s3_object=file_id, + path_to_upload=file_path, + io_log_redirect_cb=None, + ) + + download_folder = Path(tmpdir) / "downloads" + with pytest.raises(exceptions.S3InvalidStore): # noqa: PT012 + async with ProgressBarData( + num_steps=1, description=faker.pystr() + ) as progress_bar: + await filemanager.download_path_from_s3( + user_id=user_id, + store_id=None, + store_name=store, # type: ignore + s3_object=file_id, + local_path=download_folder, + io_log_redirect_cb=None, + r_clone_settings=optional_sync_settings.r_clone_settings, + progress_bar=progress_bar, + aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, + ) + + +@pytest.fixture( + params=[True, False], + ids=["with RClone", "with AwsS3Cli"], +) +def sync_settings( + r_clone_settings: RCloneSettings, + aws_s3_cli_settings: AwsS3CliSettings, + request: pytest.FixtureRequest, +) -> _SyncSettings: + is_rclone_enabled = request.param + + return _SyncSettings( + r_clone_settings=r_clone_settings if is_rclone_enabled else None, + aws_s3_cli_settings=aws_s3_cli_settings if not 
is_rclone_enabled else None, + ) + + +@pytest.mark.parametrize("is_directory", [False, True]) +async def test_valid_metadata( + node_ports_config: None, + tmpdir: Path, + user_id: int, + create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], + s3_simcore_location: LocationID, + sync_settings: _SyncSettings, + is_directory: bool, +): + # first we go with a non-existing file + file_path = Path(tmpdir) / "a-subdir" / "test.test" + file_path.parent.mkdir(parents=True, exist_ok=True) + + path_to_upload = file_path.parent if is_directory else file_path + + file_id = create_valid_file_uuid("", path_to_upload) + assert file_path.exists() is False + + is_metadata_present = await filemanager.entry_exists( + user_id=user_id, + store_id=s3_simcore_location, + s3_object=file_id, + is_directory=is_directory, + ) + assert is_metadata_present is False + + # now really create the file and upload it + file_path.write_text("I am a test file") + assert file_path.exists() + + file_id = create_valid_file_uuid("", path_to_upload) + upload_result: UploadedFolder | UploadedFile = await filemanager.upload_path( + user_id=user_id, + store_id=s3_simcore_location, + store_name=None, + s3_object=file_id, + path_to_upload=path_to_upload, + io_log_redirect_cb=None, + r_clone_settings=sync_settings.r_clone_settings, + aws_s3_cli_settings=sync_settings.aws_s3_cli_settings, + ) + if is_directory: + assert isinstance(upload_result, UploadedFolder) + else: + assert isinstance(upload_result, UploadedFile) + assert upload_result.store_id == s3_simcore_location + assert upload_result.etag + + is_metadata_present = await filemanager.entry_exists( + user_id=user_id, + store_id=s3_simcore_location, + s3_object=file_id, + is_directory=is_directory, + ) + + assert is_metadata_present is True + + +@pytest.mark.parametrize( + "fct, extra_kwargs", + [ + (filemanager.entry_exists, {"is_directory": False}), + (filemanager.delete_file, {}), + (filemanager.get_file_metadata, {}), + ], +) +async def test_invalid_call_raises_exception( + node_ports_config: None, + tmpdir: Path, + user_id: int, + create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], + s3_simcore_location: LocationID, + fct: Callable[[int, str, str, Any | None], Awaitable], + extra_kwargs: dict[str, Any], +): + file_path = Path(tmpdir) / "test.test" + file_id = create_valid_file_uuid("", file_path) + assert file_path.exists() is False + + with pytest.raises(exceptions.StorageInvalidCall): + await fct( + user_id=None, store_id=s3_simcore_location, s3_object=file_id, **extra_kwargs # type: ignore + ) + with pytest.raises(exceptions.StorageInvalidCall): + await fct(user_id=user_id, store_id=None, s3_object=file_id, **extra_kwargs) # type: ignore + with pytest.raises(exceptions.StorageInvalidCall): + await fct( + user_id=user_id, store_id=s3_simcore_location, s3_object="bing", **extra_kwargs # type: ignore + ) + + +async def test_delete_file( + node_ports_config: None, + tmpdir: Path, + user_id: int, + create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], + s3_simcore_location: LocationID, + storage_service: URL, +): + file_path = Path(tmpdir) / "test.test" + file_path.write_text("I am a test file") + assert file_path.exists() + + file_id = create_valid_file_uuid("", file_path) + upload_result: UploadedFolder | UploadedFile = await filemanager.upload_path( + user_id=user_id, + store_id=s3_simcore_location, + store_name=None, + s3_object=file_id, + path_to_upload=file_path, + io_log_redirect_cb=None, + ) + assert isinstance(upload_result, UploadedFile) + 
store_id, e_tag = upload_result.store_id, upload_result.etag + assert store_id == s3_simcore_location + assert e_tag + + is_metadata_present = await filemanager.entry_exists( + user_id=user_id, store_id=store_id, s3_object=file_id, is_directory=False + ) + assert is_metadata_present is True + + await filemanager.delete_file( + user_id=user_id, store_id=s3_simcore_location, s3_object=file_id + ) + + # check that it disappeared + assert ( + await filemanager.entry_exists( + user_id=user_id, store_id=store_id, s3_object=file_id, is_directory=False + ) + is False + ) + + +@pytest.mark.parametrize("files_in_folder", [1, 10]) +async def test_upload_path_source_is_a_folder( + node_ports_config: None, + project_id: str, + tmp_path: Path, + faker: Faker, + user_id: int, + s3_simcore_location: LocationID, + files_in_folder: int, + sync_settings: _SyncSettings, +): + source_dir = tmp_path / f"source-{faker.uuid4()}" + source_dir.mkdir(parents=True, exist_ok=True) + + download_dir = tmp_path / f"download-{faker.uuid4()}" + download_dir.mkdir(parents=True, exist_ok=True) + + for i in range(files_in_folder): + (source_dir / f"file-{i}.txt").write_text("1") + + directory_id = SimcoreS3DirectoryID.from_simcore_s3_object( + f"{project_id}/{faker.uuid4()}/some-dir-in-node-root/" + ) + s3_object = SimcoreS3FileID(directory_id) + + upload_result: UploadedFolder | UploadedFile = await filemanager.upload_path( + user_id=user_id, + store_id=s3_simcore_location, + store_name=None, + s3_object=s3_object, + path_to_upload=source_dir, + io_log_redirect_cb=None, + r_clone_settings=sync_settings.r_clone_settings, + aws_s3_cli_settings=sync_settings.aws_s3_cli_settings, + ) + assert isinstance(upload_result, UploadedFolder) + assert source_dir.exists() + + async with ProgressBarData(num_steps=1, description=faker.pystr()) as progress_bar: + await filemanager.download_path_from_s3( + user_id=user_id, + store_name=None, + store_id=s3_simcore_location, + s3_object=s3_object, + local_path=download_dir, + io_log_redirect_cb=None, + r_clone_settings=sync_settings.r_clone_settings, + progress_bar=progress_bar, + aws_s3_cli_settings=sync_settings.aws_s3_cli_settings, + ) + assert download_dir.exists() + + # ensure all files in download and source directory are the same + file_names: set = {f.name for f in source_dir.glob("*")} & { + f.name for f in download_dir.glob("*") + } + for file_name in file_names: + filecmp.cmp(source_dir / file_name, download_dir / file_name, shallow=False) diff --git a/packages/simcore-sdk/tests/unit/test_node_ports_v2_port_mapping.py b/packages/simcore-sdk/tests/unit/test_node_ports_v2_port_mapping.py index 10c074591fc..1af6645042a 100644 --- a/packages/simcore-sdk/tests/unit/test_node_ports_v2_port_mapping.py +++ b/packages/simcore-sdk/tests/unit/test_node_ports_v2_port_mapping.py @@ -4,19 +4,20 @@ from collections import deque from pprint import pprint -from typing import Any, Dict, List, Type, Union +from typing import Any import pytest from models_library.services import ServiceInput -from pydantic import ValidationError, confloat, schema_of +from pydantic import Field, ValidationError, schema_of from simcore_sdk.node_ports_v2 import exceptions from simcore_sdk.node_ports_v2.port import Port from simcore_sdk.node_ports_v2.ports_mapping import InputsList, OutputsList +from typing_extensions import Annotated from utils_port_v2 import create_valid_port_config @pytest.mark.parametrize("port_class", [InputsList, OutputsList]) -def test_empty_ports_mapping(port_class: Type[Union[InputsList, 
OutputsList]]): +def test_empty_ports_mapping(port_class: type[InputsList | OutputsList]): port_mapping = port_class(__root__={}) assert not port_mapping.items() assert not port_mapping.values() @@ -28,8 +29,8 @@ def test_empty_ports_mapping(port_class: Type[Union[InputsList, OutputsList]]): @pytest.mark.parametrize("port_class", [InputsList, OutputsList]) -def test_filled_ports_mapping(port_class: Type[Union[InputsList, OutputsList]]): - port_cfgs: Dict[str, Any] = {} +def test_filled_ports_mapping(port_class: type[InputsList | OutputsList]): + port_cfgs: dict[str, Any] = {} for t in ["integer", "number", "boolean", "string"]: port = create_valid_port_config(t) port_cfgs[port["key"]] = port @@ -71,10 +72,10 @@ def test_io_ports_are_not_aliases(): @pytest.fixture -def fake_port_meta() -> Dict[str, Any]: +def fake_port_meta() -> dict[str, Any]: """Service port metadata: defines a list of non-negative numbers""" schema = schema_of( - List[confloat(ge=0)], + list[Annotated[float, Field(ge=0)]], title="list[non-negative number]", ) schema.update( @@ -86,7 +87,7 @@ def fake_port_meta() -> Dict[str, Any]: return port_model.dict(exclude_unset=True, by_alias=True) -def test_validate_port_value_against_schema(fake_port_meta: Dict[str, Any]): +def test_validate_port_value_against_schema(fake_port_meta: dict[str, Any]): # A simcore-sdk Port instance is a combination of both # - the port's metadata # - the port's value @@ -121,7 +122,7 @@ def test_validate_port_value_against_schema(fake_port_meta: Dict[str, Any]): assert schema_error_path == deque([1]) -def test_validate_iolist_against_schema(fake_port_meta: Dict[str, Any]): +def test_validate_iolist_against_schema(fake_port_meta: dict[str, Any]): # Check how errors propagate from a single Port to InputsList # reference port diff --git a/packages/simcore-sdk/tests/unit/test_node_ports_v2_port_validation.py b/packages/simcore-sdk/tests/unit/test_node_ports_v2_port_validation.py index 41e61669fe5..a03b86bcffc 100644 --- a/packages/simcore-sdk/tests/unit/test_node_ports_v2_port_validation.py +++ b/packages/simcore-sdk/tests/unit/test_node_ports_v2_port_validation.py @@ -13,13 +13,14 @@ from unittest.mock import AsyncMock import pytest -from pydantic import BaseModel, conint, schema_of +from pydantic import BaseModel, Field, schema_of from pydantic.error_wrappers import ValidationError from simcore_sdk.node_ports_v2.port import Port from simcore_sdk.node_ports_v2.port_validation import ( PortUnitError, validate_port_content, ) +from typing_extensions import Annotated def _replace_value_in_dict(item: Any, original_schema: dict[str, Any]): @@ -128,7 +129,7 @@ async def test_port_with_array_of_object(mocker): mocker.patch.object(Port, "_node_ports", new=AsyncMock()) class A(BaseModel): - i: conint(gt=3) + i: Annotated[int, Field(gt=3)] b: bool = False s: str l: list[int] From 17681a57ad24f5067366a79ba005dcb749c9ffcc Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 16 Sep 2024 14:23:49 +0200 Subject: [PATCH 007/280] upgrade settings-library --- packages/settings-library/requirements/_base.txt | 7 ++++++- packages/settings-library/requirements/_test.txt | 6 ------ packages/settings-library/requirements/_tools.txt | 10 ---------- .../settings-library/src/settings_library/base.py | 14 ++++++-------- .../src/settings_library/comp_services.py | 6 +++--- .../src/settings_library/docker_registry.py | 11 ++++++----- .../settings-library/src/settings_library/ec2.py | 9 ++++----- .../settings-library/src/settings_library/efs.py | 4 ++-- 
.../src/settings_library/postgres.py | 14 ++++++++------ .../settings-library/src/settings_library/s3.py | 9 ++++----- .../settings-library/src/settings_library/ssm.py | 9 ++++----- .../src/settings_library/tracing.py | 4 ++-- .../src/settings_library/twilio.py | 6 ++---- .../tests/test__pydantic_settings.py | 5 ++++- packages/settings-library/tests/test_base.py | 3 ++- .../settings-library/tests/test_base_w_postgres.py | 6 ++++-- .../settings-library/tests/test_utils_logging.py | 8 ++++---- 17 files changed, 61 insertions(+), 70 deletions(-) diff --git a/packages/settings-library/requirements/_base.txt b/packages/settings-library/requirements/_base.txt index 900c4fea2aa..422fcb01a80 100644 --- a/packages/settings-library/requirements/_base.txt +++ b/packages/settings-library/requirements/_base.txt @@ -1,13 +1,17 @@ +annotated-types==0.7.0 + # via pydantic click==8.1.7 # via typer markdown-it-py==3.0.0 # via rich mdurl==0.1.2 # via markdown-it-py -pydantic==1.10.17 +pydantic==2.9.1 # via # -c requirements/../../../requirements/constraints.txt # -r requirements/_base.in +pydantic-core==2.23.3 + # via pydantic pygments==2.18.0 # via rich rich==13.7.1 @@ -21,4 +25,5 @@ typer==0.12.4 typing-extensions==4.12.2 # via # pydantic + # pydantic-core # typer diff --git a/packages/settings-library/requirements/_test.txt b/packages/settings-library/requirements/_test.txt index 1ca7d43dd3c..9a1422f4bc4 100644 --- a/packages/settings-library/requirements/_test.txt +++ b/packages/settings-library/requirements/_test.txt @@ -2,8 +2,6 @@ coverage==7.6.1 # via # -r requirements/_test.in # pytest-cov -exceptiongroup==1.2.2 - # via pytest faker==27.0.0 # via -r requirements/_test.in iniconfig==2.0.0 @@ -39,7 +37,3 @@ six==1.16.0 # via python-dateutil termcolor==2.4.0 # via pytest-sugar -tomli==2.0.1 - # via - # coverage - # pytest diff --git a/packages/settings-library/requirements/_tools.txt b/packages/settings-library/requirements/_tools.txt index a75c5397d80..d14257822b0 100644 --- a/packages/settings-library/requirements/_tools.txt +++ b/packages/settings-library/requirements/_tools.txt @@ -67,21 +67,11 @@ ruff==0.6.1 # via -r requirements/../../../requirements/devenv.txt setuptools==73.0.1 # via pip-tools -tomli==2.0.1 - # via - # -c requirements/_test.txt - # black - # build - # mypy - # pip-tools - # pylint tomlkit==0.13.2 # via pylint typing-extensions==4.12.2 # via # -c requirements/_base.txt - # astroid - # black # mypy virtualenv==20.26.3 # via pre-commit diff --git a/packages/settings-library/src/settings_library/base.py b/packages/settings-library/src/settings_library/base.py index 296b453e26c..609ed89d6a4 100644 --- a/packages/settings-library/src/settings_library/base.py +++ b/packages/settings-library/src/settings_library/base.py @@ -3,17 +3,11 @@ from functools import cached_property from typing import Final, get_args, get_origin -from pydantic import ( - BaseConfig, - BaseSettings, - ConfigError, - Extra, - ValidationError, - validator, -) +from pydantic import BaseConfig, ConfigError, Extra, ValidationError, validator from pydantic.error_wrappers import ErrorList, ErrorWrapper from pydantic.fields import ModelField, Undefined from pydantic.typing import is_literal_type +from pydantic_settings import BaseSettings _logger = logging.getLogger(__name__) @@ -70,6 +64,8 @@ class BaseCustomSettings(BaseSettings): SEE tests for details. """ + # TODO[pydantic]: We couldn't refactor the `validator`, please replace it by `field_validator` manually. 
+ # Check https://docs.pydantic.dev/dev-v2/migration/#changes-to-validators for more information. @validator("*", pre=True) @classmethod def parse_none(cls, v, field: ModelField): @@ -78,6 +74,8 @@ def parse_none(cls, v, field: ModelField): return None return v + # TODO[pydantic]: The `Config` class inherits from another class, please create the `model_config` manually. + # Check https://docs.pydantic.dev/dev-v2/migration/#changes-to-config for more information. class Config(BaseConfig): case_sensitive = True # All must be capitalized extra = Extra.forbid diff --git a/packages/settings-library/src/settings_library/comp_services.py b/packages/settings-library/src/settings_library/comp_services.py index e3cb628f7b7..7167335c4be 100644 --- a/packages/settings-library/src/settings_library/comp_services.py +++ b/packages/settings-library/src/settings_library/comp_services.py @@ -1,4 +1,4 @@ -from pydantic import ByteSize, NonNegativeInt, validator +from pydantic import ByteSize, NonNegativeInt, field_validator from pydantic.tools import parse_raw_as from settings_library.base import BaseCustomSettings @@ -15,14 +15,14 @@ class ComputationalServices(BaseCustomSettings): ) DEFAULT_RUNTIME_TIMEOUT: NonNegativeInt = 0 - @validator("DEFAULT_MAX_NANO_CPUS", pre=True) + @field_validator("DEFAULT_MAX_NANO_CPUS", mode="before") @classmethod def _set_default_cpus_if_negative(cls, v): if v is None or v == "" or int(v) <= 0: v = _DEFAULT_MAX_NANO_CPUS_VALUE return v - @validator("DEFAULT_MAX_MEMORY", pre=True) + @field_validator("DEFAULT_MAX_MEMORY", mode="before") @classmethod def _set_default_memory_if_negative(cls, v): if v is None or v == "" or int(v) <= 0: diff --git a/packages/settings-library/src/settings_library/docker_registry.py b/packages/settings-library/src/settings_library/docker_registry.py index bb365cb9785..aa41f74106c 100644 --- a/packages/settings-library/src/settings_library/docker_registry.py +++ b/packages/settings-library/src/settings_library/docker_registry.py @@ -1,7 +1,7 @@ from functools import cached_property -from typing import Any, ClassVar +from typing import Any -from pydantic import Field, SecretStr, validator +from pydantic import ConfigDict, Field, SecretStr, field_validator from .base import BaseCustomSettings @@ -23,7 +23,7 @@ class RegistrySettings(BaseCustomSettings): ) REGISTRY_SSL: bool = Field(..., description="access to registry through ssl") - @validator("REGISTRY_PATH", pre=True) + @field_validator("REGISTRY_PATH", mode="before") @classmethod def _escape_none_string(cls, v) -> Any | None: return None if v == "None" else v @@ -36,8 +36,8 @@ def resolved_registry_url(self) -> str: def api_url(self) -> str: return f"{self.REGISTRY_URL}/v2" - class Config(BaseCustomSettings.Config): - schema_extra: ClassVar[dict[str, Any]] = { # type: ignore[misc] + model_config = ConfigDict( + json_schema_extra={ "examples": [ { "REGISTRY_AUTH": "True", @@ -48,3 +48,4 @@ class Config(BaseCustomSettings.Config): } ], } + ) diff --git a/packages/settings-library/src/settings_library/ec2.py b/packages/settings-library/src/settings_library/ec2.py index 2cd7cf0b9a6..a28fd9335c8 100644 --- a/packages/settings-library/src/settings_library/ec2.py +++ b/packages/settings-library/src/settings_library/ec2.py @@ -1,6 +1,4 @@ -from typing import Any, ClassVar - -from pydantic import Field +from pydantic import ConfigDict, Field from .base import BaseCustomSettings @@ -13,8 +11,8 @@ class EC2Settings(BaseCustomSettings): EC2_REGION_NAME: str = "us-east-1" EC2_SECRET_ACCESS_KEY: str - class 
Config(BaseCustomSettings.Config):
-        schema_extra: ClassVar[dict[str, Any]] = {  # type: ignore[misc]
+    model_config = ConfigDict(
+        json_schema_extra={
             "examples": [
                 {
                     "EC2_ACCESS_KEY_ID": "my_access_key_id",
@@ -24,3 +22,4 @@ class Config(BaseCustomSettings.Config):
             }
         ],
     }
+    )
diff --git a/packages/settings-library/src/settings_library/efs.py b/packages/settings-library/src/settings_library/efs.py
index d09b8abb20f..34c48f9dca6 100644
--- a/packages/settings-library/src/settings_library/efs.py
+++ b/packages/settings-library/src/settings_library/efs.py
@@ -8,7 +8,7 @@ class AwsEfsSettings(BaseCustomSettings):
 
     EFS_DNS_NAME: str = Field(
         description="AWS Elastic File System DNS name",
-        example="fs-xxx.efs.us-east-1.amazonaws.com",
+        examples=["fs-xxx.efs.us-east-1.amazonaws.com"],
     )
     EFS_PROJECT_SPECIFIC_DATA_DIRECTORY: str
     EFS_MOUNTED_PATH: Path = Field(
@@ -16,7 +16,7 @@ class AwsEfsSettings(BaseCustomSettings):
     )
     EFS_ONLY_ENABLED_FOR_USERIDS: list[int] = Field(
         description="This is temporary solution so we can enable it for specific users for testing purpose",
-        example=[1],
+        examples=[[1]],
     )
 
 
diff --git a/packages/settings-library/src/settings_library/postgres.py b/packages/settings-library/src/settings_library/postgres.py
index f8335bbeed2..b3b122480f1 100644
--- a/packages/settings-library/src/settings_library/postgres.py
+++ b/packages/settings-library/src/settings_library/postgres.py
@@ -1,8 +1,7 @@
 import urllib.parse
 from functools import cached_property
-from typing import Any, ClassVar
 
-from pydantic import Field, PostgresDsn, SecretStr, validator
+from pydantic import AliasChoices, ConfigDict, Field, PostgresDsn, SecretStr, field_validator
 
 from .base import BaseCustomSettings
 from .basic_types import PortInt
@@ -31,7 +30,7 @@ class PostgresSettings(BaseCustomSettings):
     POSTGRES_CLIENT_NAME: str | None = Field(
         default=None,
         description="Name of the application connecting the postgres database, will default to use the host hostname (hostname on linux)",
-        env=[
+        validation_alias=AliasChoices(
             "POSTGRES_CLIENT_NAME",
             # This is useful when running inside a docker container, then the hostname is set each client gets a different name
             "HOST",
@@ -39,7 +38,9 @@ class PostgresSettings(BaseCustomSettings):
-        ],
+        ),
     )
 
-    @validator("POSTGRES_MAXSIZE")
+    # TODO[pydantic]: We couldn't refactor the `validator`, please replace it by `field_validator` manually.
+    # Check https://docs.pydantic.dev/dev-v2/migration/#changes-to-validators for more information.
+ @field_validator("POSTGRES_MAXSIZE") @classmethod def _check_size(cls, v, values): if not (values["POSTGRES_MINSIZE"] <= v): @@ -81,8 +82,8 @@ def dsn_with_query(self) -> str: ) return dsn - class Config(BaseCustomSettings.Config): - schema_extra: ClassVar[dict[str, Any]] = { # type: ignore[misc] + model_config = ConfigDict( + json_schema_extra={ "examples": [ # minimal required { @@ -94,3 +95,4 @@ class Config(BaseCustomSettings.Config): } ], } + ) diff --git a/packages/settings-library/src/settings_library/s3.py b/packages/settings-library/src/settings_library/s3.py index cef1bf11be5..5e971283d46 100644 --- a/packages/settings-library/src/settings_library/s3.py +++ b/packages/settings-library/src/settings_library/s3.py @@ -1,6 +1,4 @@ -from typing import Any, ClassVar - -from pydantic import AnyHttpUrl, Field +from pydantic import AnyHttpUrl, ConfigDict, Field from .base import BaseCustomSettings from .basic_types import IDStr @@ -15,8 +13,8 @@ class S3Settings(BaseCustomSettings): S3_REGION: IDStr S3_SECRET_KEY: IDStr - class Config(BaseCustomSettings.Config): - schema_extra: ClassVar[dict[str, Any]] = { # type: ignore[misc] + model_config = ConfigDict( + json_schema_extra={ "examples": [ { # non AWS use-case @@ -35,3 +33,4 @@ class Config(BaseCustomSettings.Config): }, ], } + ) diff --git a/packages/settings-library/src/settings_library/ssm.py b/packages/settings-library/src/settings_library/ssm.py index 32b965fa123..05c5200a0b5 100644 --- a/packages/settings-library/src/settings_library/ssm.py +++ b/packages/settings-library/src/settings_library/ssm.py @@ -1,6 +1,4 @@ -from typing import Any, ClassVar - -from pydantic import AnyHttpUrl, Field, SecretStr +from pydantic import AnyHttpUrl, ConfigDict, Field, SecretStr from .base import BaseCustomSettings @@ -13,8 +11,8 @@ class SSMSettings(BaseCustomSettings): SSM_REGION_NAME: str = "us-east-1" SSM_SECRET_ACCESS_KEY: SecretStr - class Config(BaseCustomSettings.Config): - schema_extra: ClassVar[dict[str, Any]] = { # type: ignore[misc] + model_config = ConfigDict( + json_schema_extra={ "examples": [ { "SSM_ACCESS_KEY_ID": "my_access_key_id", @@ -24,3 +22,4 @@ class Config(BaseCustomSettings.Config): } ], } + ) diff --git a/packages/settings-library/src/settings_library/tracing.py b/packages/settings-library/src/settings_library/tracing.py index 28a11cbbf6a..36013bd93ef 100644 --- a/packages/settings-library/src/settings_library/tracing.py +++ b/packages/settings-library/src/settings_library/tracing.py @@ -1,4 +1,4 @@ -from pydantic import AnyUrl, Field, parse_obj_as +from pydantic import AliasChoices, AnyUrl, Field, parse_obj_as from .base import BaseCustomSettings @@ -17,5 +17,5 @@ class TracingSettings(BaseCustomSettings): TRACING_CLIENT_NAME: str = Field( default=UNDEFINED_CLIENT_NAME, description="Name of the application connecting the tracing service", - env=["HOST", "HOSTNAME", "TRACING_CLIENT_NAME"], + validation_alias=AliasChoices("HOST", "HOSTNAME", "TRACING_CLIENT_NAME"), ) diff --git a/packages/settings-library/src/settings_library/twilio.py b/packages/settings-library/src/settings_library/twilio.py index eb4ec0c707a..d6284e6b8d0 100644 --- a/packages/settings-library/src/settings_library/twilio.py +++ b/packages/settings-library/src/settings_library/twilio.py @@ -9,7 +9,7 @@ import re from re import Pattern -from pydantic import ConstrainedStr, Field, parse_obj_as +from pydantic import ConfigDict, ConstrainedStr, Field, parse_obj_as from .base import BaseCustomSettings @@ -18,9 +18,7 @@ class CountryCodeStr(ConstrainedStr): # 
Based on https://countrycode.org/ strip_whitespace: bool = True regex: Pattern[str] | None = re.compile(r"^\d{1,4}") - - class Config: - frozen = True + model_config = ConfigDict(frozen=True) class TwilioSettings(BaseCustomSettings): diff --git a/packages/settings-library/tests/test__pydantic_settings.py b/packages/settings-library/tests/test__pydantic_settings.py index 8cf3eadc30f..44168ff636a 100644 --- a/packages/settings-library/tests/test__pydantic_settings.py +++ b/packages/settings-library/tests/test__pydantic_settings.py @@ -13,8 +13,9 @@ """ -from pydantic import BaseSettings, validator +from pydantic import validator from pydantic.fields import ModelField, Undefined +from pydantic_settings import BaseSettings def assert_field_specs( @@ -46,6 +47,8 @@ class Settings(BaseSettings): # Other ways to write down "required" is using ... VALUE_ALSO_REQUIRED: int = ... # type: ignore + # TODO[pydantic]: We couldn't refactor the `validator`, please replace it by `field_validator` manually. + # Check https://docs.pydantic.dev/dev-v2/migration/#changes-to-validators for more information. @validator("*", pre=True) @classmethod def parse_none(cls, v, values, field: ModelField): diff --git a/packages/settings-library/tests/test_base.py b/packages/settings-library/tests/test_base.py index 7cbd9fa8773..2809d9787e8 100644 --- a/packages/settings-library/tests/test_base.py +++ b/packages/settings-library/tests/test_base.py @@ -10,8 +10,9 @@ import pytest import settings_library.base -from pydantic import BaseModel, BaseSettings, ValidationError +from pydantic import BaseModel, ValidationError from pydantic.fields import Field +from pydantic_settings import BaseSettings from pytest_mock import MockerFixture from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_envfile from settings_library.base import ( diff --git a/packages/settings-library/tests/test_base_w_postgres.py b/packages/settings-library/tests/test_base_w_postgres.py index d54d40bf925..df9c4fd6cf3 100644 --- a/packages/settings-library/tests/test_base_w_postgres.py +++ b/packages/settings-library/tests/test_base_w_postgres.py @@ -6,7 +6,7 @@ from collections.abc import Callable import pytest -from pydantic import Field, ValidationError +from pydantic import AliasChoices, Field, ValidationError from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_envfile from settings_library.base import BaseCustomSettings, DefaultFromEnvFactoryError from settings_library.basic_types import PortInt @@ -49,7 +49,9 @@ class _FakePostgresSettings(BaseCustomSettings): POSTGRES_CLIENT_NAME: str | None = Field( None, - env=["HOST", "HOSTNAME", "POSTGRES_CLIENT_NAME"], + validation_alias=AliasChoices( + "HOST", "HOSTNAME", "POSTGRES_CLIENT_NAME" + ), ) # diff --git a/packages/settings-library/tests/test_utils_logging.py b/packages/settings-library/tests/test_utils_logging.py index 9054b391333..47c2f316505 100644 --- a/packages/settings-library/tests/test_utils_logging.py +++ b/packages/settings-library/tests/test_utils_logging.py @@ -1,6 +1,6 @@ import logging -from pydantic import Field, validator +from pydantic import AliasChoices, Field, field_validator from settings_library.base import BaseCustomSettings from settings_library.basic_types import BootMode from settings_library.utils_logging import MixinLoggingSettings @@ -19,15 +19,15 @@ class Settings(BaseCustomSettings, MixinLoggingSettings): # LOGGING LOG_LEVEL: str = Field( "WARNING", - env=[ + validation_alias=AliasChoices( "APPNAME_LOG_LEVEL", "LOG_LEVEL", - ], + ), ) APPNAME_DEBUG: 
bool = Field(False, description="Starts app in debug mode") - @validator("LOG_LEVEL") + @field_validator("LOG_LEVEL") @classmethod def _v(cls, value) -> str: return cls.validate_log_level(value) From 81c045ba040920bd488cba6b7e18dd7f16a373e8 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 16 Sep 2024 14:38:22 +0200 Subject: [PATCH 008/280] upgrade service-integration --- .../requirements/_base.txt | 11 +- .../requirements/_test.txt | 9 - .../requirements/_tools.txt | 11 - .../_compose_spec_model_autogenerated.py | 200 +++++++++--------- .../src/service_integration/oci_image_spec.py | 33 ++- .../src/service_integration/osparc_config.py | 36 ++-- .../src/service_integration/settings.py | 7 +- .../src/service_integration/versioning.py | 22 +- requirements/constraints.txt | 5 - 9 files changed, 153 insertions(+), 181 deletions(-) diff --git a/packages/service-integration/requirements/_base.txt b/packages/service-integration/requirements/_base.txt index 904f97c614f..f05f3b49316 100644 --- a/packages/service-integration/requirements/_base.txt +++ b/packages/service-integration/requirements/_base.txt @@ -1,3 +1,5 @@ +annotated-types==0.7.0 + # via pydantic arrow==1.3.0 # via # -r requirements/../../../packages/models-library/requirements/_base.in @@ -31,8 +33,6 @@ docker==7.1.0 # via -r requirements/_base.in email-validator==2.2.0 # via pydantic -exceptiongroup==1.2.2 - # via pytest idna==3.7 # via # email-validator @@ -68,11 +68,13 @@ packaging==24.1 # via pytest pluggy==1.5.0 # via pytest -pydantic==1.10.17 +pydantic==2.9.1 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/models-library/requirements/_base.in +pydantic-core==2.23.3 + # via pydantic pygments==2.18.0 # via rich pytest==8.3.2 @@ -109,8 +111,6 @@ six==1.16.0 # via python-dateutil text-unidecode==1.3 # via python-slugify -tomli==2.0.1 - # via pytest typer==0.12.4 # via -r requirements/_base.in types-python-dateutil==2.9.0.20240821 @@ -118,6 +118,7 @@ types-python-dateutil==2.9.0.20240821 typing-extensions==4.12.2 # via # pydantic + # pydantic-core # typer urllib3==2.2.2 # via diff --git a/packages/service-integration/requirements/_test.txt b/packages/service-integration/requirements/_test.txt index 925a176d40c..fa704698091 100644 --- a/packages/service-integration/requirements/_test.txt +++ b/packages/service-integration/requirements/_test.txt @@ -6,10 +6,6 @@ coverage==7.6.1 # via # -r requirements/_test.in # pytest-cov -exceptiongroup==1.2.2 - # via - # -c requirements/_base.txt - # pytest iniconfig==2.0.0 # via # -c requirements/_base.txt @@ -48,11 +44,6 @@ rpds-py==0.20.0 # referencing termcolor==2.4.0 # via pytest-sugar -tomli==2.0.1 - # via - # -c requirements/_base.txt - # coverage - # pytest types-docker==7.1.0.20240821 # via -r requirements/_test.in types-jsonschema==4.23.0.20240813 diff --git a/packages/service-integration/requirements/_tools.txt b/packages/service-integration/requirements/_tools.txt index f76d26f846f..354746f70e4 100644 --- a/packages/service-integration/requirements/_tools.txt +++ b/packages/service-integration/requirements/_tools.txt @@ -69,22 +69,11 @@ ruff==0.6.1 # via -r requirements/../../../requirements/devenv.txt setuptools==73.0.1 # via pip-tools -tomli==2.0.1 - # via - # -c requirements/_base.txt - # -c requirements/_test.txt - # black - # build - # mypy - # pip-tools - # pylint tomlkit==0.13.2 # via pylint typing-extensions==4.12.2 # via 
# -c requirements/_base.txt - # astroid - # black # mypy virtualenv==20.26.3 # via pre-commit diff --git a/packages/service-integration/src/service_integration/_compose_spec_model_autogenerated.py b/packages/service-integration/src/service_integration/_compose_spec_model_autogenerated.py index a390a469a41..b762def3b68 100644 --- a/packages/service-integration/src/service_integration/_compose_spec_model_autogenerated.py +++ b/packages/service-integration/src/service_integration/_compose_spec_model_autogenerated.py @@ -7,7 +7,8 @@ from enum import Enum from typing import Any -from pydantic import BaseModel, ConstrainedInt, Extra, Field, conint, constr +from pydantic import BaseModel, ConfigDict, ConstrainedInt, Field, StringConstraints +from typing_extensions import Annotated # MODIFICATIONS ------------------------------------------------------------------------- # @@ -28,8 +29,7 @@ class PortInt(ConstrainedInt): class Configuration(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") source: str | None = None target: str | None = None @@ -39,8 +39,7 @@ class Config: class CredentialSpec(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") config: str | None = None file: str | None = None @@ -54,31 +53,29 @@ class Condition(Enum): class DependsOn(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") condition: Condition class Extend(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") service: str file: str | None = None class Logging(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") driver: str | None = None - options: dict[constr(regex=r"^.+$"), str | float | None] | None = None + options: dict[ + Annotated[str, StringConstraints(pattern=r"^.+$")], str | float | None + ] | None = None class Port(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") mode: str | None = None host_ip: str | None = None @@ -96,8 +93,7 @@ class PullPolicy(Enum): class Secret1(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") source: str | None = None target: str | None = None @@ -107,38 +103,33 @@ class Config: class Ulimit(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") hard: int soft: int class Bind(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") propagation: str | None = None create_host_path: bool | None = None class Volume2(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") nocopy: bool | None = None class Tmpfs(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") - size: conint(ge=0) | str | None = None + size: Annotated[int, Field(ge=0)] | str | None = None class Volume1(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") type: str source: str | None = None @@ -151,8 +142,7 @@ class Config: class Healthcheck(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") disable: bool | None = None interval: str | None = None @@ -168,8 +158,7 @@ class Order(Enum): class RollbackConfig(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") parallelism: int | None = None delay: str | None = None @@ -185,8 +174,7 @@ class 
Order1(Enum): class UpdateConfig(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") parallelism: int | None = None delay: str | None = None @@ -197,16 +185,14 @@ class Config: class Limits(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") cpus: float | str | None = None memory: str | None = None class RestartPolicy(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") condition: str | None = None delay: str | None = None @@ -215,15 +201,13 @@ class Config: class Preference(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") spread: str | None = None class Placement(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") constraints: list[str] | None = None preferences: list[Preference] | None = None @@ -231,16 +215,14 @@ class Config: class DiscreteResourceSpec(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") kind: str | None = None value: float | None = None class GenericResource(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") discrete_resource_spec: DiscreteResourceSpec | None = None @@ -250,34 +232,32 @@ class GenericResources(BaseModel): class ConfigItem(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") subnet: str | None = None ip_range: str | None = None gateway: str | None = None - aux_addresses: dict[constr(regex=r"^.+$"), str] | None = None + aux_addresses: dict[ + Annotated[str, StringConstraints(pattern=r"^.+$")], str + ] | None = None class Ipam(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") driver: str | None = None config: list[ConfigItem] | None = None - options: dict[constr(regex=r"^.+$"), str] | None = None + options: dict[Annotated[str, StringConstraints(pattern=r"^.+$")], str] | None = None class External(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") name: str | None = None class External1(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") name: str | None = None @@ -295,32 +275,34 @@ class ListOfStrings(BaseModel): class ListOrDict(BaseModel): - __root__: (dict[constr(regex=r".+"), str | float | bool | None] | list[str]) + __root__: ( + dict[ + Annotated[str, StringConstraints(pattern=r".+")], str | float | bool | None + ] + | list[str] + ) class BlkioLimit(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") path: str | None = None rate: int | str | None = None class BlkioWeight(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") path: str | None = None weight: int | None = None class Constraints(BaseModel): - __root__: Any + __root__: Any = None class BuildItem(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") context: str | None = None dockerfile: str | None = None @@ -335,8 +317,7 @@ class Config: class BlkioConfig(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") device_read_bps: list[BlkioLimit] | None = None device_read_iops: list[BlkioLimit] | None = None @@ -347,8 +328,7 @@ class Config: class Network1(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") aliases: 
ListOfStrings | None = None ipv4_address: str | None = None @@ -358,8 +338,7 @@ class Config: class Device(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") capabilities: ListOfStrings | None = None count: str | int | None = None @@ -373,12 +352,13 @@ class Devices(BaseModel): class Network(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") name: str | None = None driver: str | None = None - driver_opts: dict[constr(regex=r"^.+$"), str | float] | None = None + driver_opts: dict[ + Annotated[str, StringConstraints(pattern=r"^.+$")], str | float + ] | None = None ipam: Ipam | None = None external: External | None = None internal: bool | None = None @@ -388,32 +368,33 @@ class Config: class Volume(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") name: str | None = None driver: str | None = None - driver_opts: dict[constr(regex=r"^.+$"), str | float] | None = None + driver_opts: dict[ + Annotated[str, StringConstraints(pattern=r"^.+$")], str | float + ] | None = None external: External1 | None = None labels: ListOrDict | None = None class Secret(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") name: str | None = None file: str | None = None external: External2 | None = None labels: ListOrDict | None = None driver: str | None = None - driver_opts: dict[constr(regex=r"^.+$"), str | float] | None = None + driver_opts: dict[ + Annotated[str, StringConstraints(pattern=r"^.+$")], str | float + ] | None = None template_driver: str | None = None class ComposeSpecConfig(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") name: str | None = None file: str | None = None @@ -427,8 +408,7 @@ class StringOrList(BaseModel): class Reservations(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") cpus: float | str | None = None memory: str | None = None @@ -437,16 +417,14 @@ class Config: class Resources(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") limits: Limits | None = None reservations: Reservations | None = None class Deployment(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") mode: str | None = None endpoint_mode: str | None = None @@ -460,8 +438,7 @@ class Config: class Service(BaseModel): - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") deploy: Deployment | None = None build: str | BuildItem | None = None @@ -472,8 +449,8 @@ class Config: command: str | list[str] | None = None configs: list[str | Configuration] | None = None container_name: str | None = None - cpu_count: conint(ge=0) | None = None - cpu_percent: conint(ge=0, le=100) | None = None + cpu_count: Annotated[int, Field(ge=0)] | None = None + cpu_percent: Annotated[int, Field(ge=0, le=100)] | None = None cpu_shares: float | str | None = None cpu_quota: float | str | None = None cpu_period: float | str | None = None @@ -483,7 +460,10 @@ class Config: cpuset: str | None = None credential_spec: CredentialSpec | None = None depends_on: None | ( - ListOfStrings | dict[constr(regex=r"^[a-zA-Z0-9._-]+$"), DependsOn] + ListOfStrings + | dict[ + Annotated[str, StringConstraints(pattern=r"^[a-zA-Z0-9._-]+$")], DependsOn + ] ) = None device_cgroup_rules: ListOfStrings | None = None devices: list[str] | None = None @@ -515,10 +495,14 @@ class Config: memswap_limit: 
float | str | None = None network_mode: str | None = None networks: None | ( - ListOfStrings | dict[constr(regex=r"^[a-zA-Z0-9._-]+$"), Network1 | None] + ListOfStrings + | dict[ + Annotated[str, StringConstraints(pattern=r"^[a-zA-Z0-9._-]+$")], + Network1 | None, + ] ) = None oom_kill_disable: bool | None = None - oom_score_adj: conint(ge=-1000, le=1000) | None = None + oom_score_adj: Annotated[int, Field(ge=-1000, le=1000)] | None = None pid: str | None = None pids_limit: float | str | None = None platform: str | None = None @@ -540,7 +524,9 @@ class Config: storage_opt: dict[str, Any] | None = None tmpfs: StringOrList | None = None tty: bool | None = None - ulimits: dict[constr(regex=r"^[a-z]+$"), int | Ulimit] | None = None + ulimits: dict[ + Annotated[str, StringConstraints(pattern=r"^[a-z]+$")], int | Ulimit + ] | None = None user: str | None = None userns_mode: str | None = None volumes: list[str | Volume1] | None = None @@ -553,15 +539,27 @@ class ComposeSpecification(BaseModel): The Compose file is a YAML file defining a multi-containers based application. """ - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") version: str | None = Field( None, description="Version of the Compose specification used. Tools not implementing required version MUST reject the configuration file.", ) - services: dict[constr(regex=r"^[a-zA-Z0-9._-]+$"), Service] | None = None - networks: dict[constr(regex=r"^[a-zA-Z0-9._-]+$"), Network] | None = None - volumes: dict[constr(regex=r"^[a-zA-Z0-9._-]+$"), Volume] | None = None - secrets: dict[constr(regex=r"^[a-zA-Z0-9._-]+$"), Secret] | None = None - configs: None | (dict[constr(regex=r"^[a-zA-Z0-9._-]+$"), ComposeSpecConfig]) = None + services: dict[ + Annotated[str, StringConstraints(pattern=r"^[a-zA-Z0-9._-]+$")], Service + ] | None = None + networks: dict[ + Annotated[str, StringConstraints(pattern=r"^[a-zA-Z0-9._-]+$")], Network + ] | None = None + volumes: dict[ + Annotated[str, StringConstraints(pattern=r"^[a-zA-Z0-9._-]+$")], Volume + ] | None = None + secrets: dict[ + Annotated[str, StringConstraints(pattern=r"^[a-zA-Z0-9._-]+$")], Secret + ] | None = None + configs: None | ( + dict[ + Annotated[str, StringConstraints(pattern=r"^[a-zA-Z0-9._-]+$")], + ComposeSpecConfig, + ] + ) = None diff --git a/packages/service-integration/src/service_integration/oci_image_spec.py b/packages/service-integration/src/service_integration/oci_image_spec.py index e07a5e4cafc..3b9e45b46ab 100644 --- a/packages/service-integration/src/service_integration/oci_image_spec.py +++ b/packages/service-integration/src/service_integration/oci_image_spec.py @@ -11,8 +11,7 @@ from models_library.basic_types import SHA1Str, VersionStr from models_library.utils.labels_annotations import from_labels, to_labels -from pydantic import BaseModel, Field -from pydantic.config import Extra +from pydantic import BaseModel, ConfigDict, Field from pydantic.networks import AnyUrl # @@ -100,22 +99,20 @@ class OciImageSpecAnnotations(BaseModel): None, description="Digest of the image this image is based on (string)", ) - - class Config: - alias_generator = _underscore_as_dot - allow_population_by_field_name = True - extra = Extra.forbid + model_config = ConfigDict( + alias_generator=_underscore_as_dot, populate_by_name=True, extra="forbid" + ) @classmethod def from_labels_annotations( cls, labels: dict[str, str] ) -> "OciImageSpecAnnotations": data = from_labels(labels, prefix_key=OCI_LABEL_PREFIX, trim_key_head=False) - return cls.parse_obj(data) + return 
cls.model_validate(data) def to_labels_annotations(self) -> dict[str, str]: labels: dict[str, str] = to_labels( - self.dict(exclude_unset=True, by_alias=True, exclude_none=True), + self.model_dump(exclude_unset=True, by_alias=True, exclude_none=True), prefix_key=OCI_LABEL_PREFIX, ) return labels @@ -131,30 +128,30 @@ class LabelSchemaAnnotations(BaseModel): build_date: datetime vcs_ref: str vcs_url: AnyUrl - - class Config: - alias_generator = lambda field_name: field_name.replace("_", "-") - allow_population_by_field_name = True - extra = Extra.forbid + model_config = ConfigDict( + alias_generator=lambda field_name: field_name.replace("_", "-"), + populate_by_name=True, + extra="forbid", + ) @classmethod def create_from_env(cls) -> "LabelSchemaAnnotations": data = {} - for field_name in cls.__fields__: + for field_name in cls.model_fields: if value := os.environ.get(field_name.upper()): data[field_name] = value - return cls.parse_obj(data) + return cls.model_validate(data) def to_oci_data(self) -> dict[str, Any]: """Collects data that be converted to OCI labels. WARNING: label-schema has be deprecated in favor of OCI image specs """ - convertable_data = self.dict( + convertable_data = self.model_dump( include=set(_TO_OCI.keys()), exclude_unset=True, exclude_none=True ) assert set(convertable_data.keys()).issubset( # nosec - set(self.__fields__.keys()) + set(self.model_fields.keys()) ) # nosec return {_TO_OCI[key]: value for key, value in convertable_data.items()} diff --git a/packages/service-integration/src/service_integration/osparc_config.py b/packages/service-integration/src/service_integration/osparc_config.py index 1a340729e41..7d08b71f107 100644 --- a/packages/service-integration/src/service_integration/osparc_config.py +++ b/packages/service-integration/src/service_integration/osparc_config.py @@ -36,9 +36,14 @@ from_labels, to_labels, ) -from pydantic import NonNegativeInt, ValidationError -from pydantic.class_validators import root_validator, validator -from pydantic.config import Extra +from pydantic import ( + ConfigDict, + NonNegativeInt, + ValidationError, + field_validator, + model_validator, +) +from pydantic.class_validators import field_validator from pydantic.fields import Field from pydantic.main import BaseModel @@ -101,7 +106,7 @@ class MetadataConfig(ServiceMetaDataPublished): exclude=True, ) - @validator("contact") + @field_validator("contact") @classmethod def _check_contact_in_authors(cls, v, values): """catalog service relies on contact and author to define access rights""" @@ -175,7 +180,7 @@ class SettingsItem(BaseModel): description="The value of the service setting (shall follow Docker REST API scheme for services", ) - @validator("type_", pre=True) + @field_validator("type_", mode="before") @classmethod def ensure_backwards_compatible_setting_type(cls, v): if v == "resources": @@ -183,7 +188,7 @@ def ensure_backwards_compatible_setting_type(cls, v): return "Resources" return v - @validator("value", pre=True) + @field_validator("value", mode="before") @classmethod def check_value_against_custom_types(cls, v, values): if (type_ := values.get("type_")) and type_ == "ContainerSpec": @@ -192,9 +197,7 @@ def check_value_against_custom_types(cls, v, values): class ValidatingDynamicSidecarServiceLabels(DynamicSidecarServiceLabels): - class Config: - extra = Extra.allow - allow_population_by_field_name = True + model_config = ConfigDict(extra="allow", populate_by_name=True) def _underscore_as_minus(field_name: str) -> str: @@ -225,7 +228,7 @@ class 
RuntimeConfig(BaseModel): settings: list[SettingsItem] = Field(default_factory=list) - @root_validator(pre=True) + @model_validator(mode="before") @classmethod def ensure_compatibility(cls, v): # NOTE: if changes are applied to `DynamicSidecarServiceLabels` @@ -242,25 +245,24 @@ def ensure_compatibility(cls, v): return v - class Config: - alias_generator = _underscore_as_minus - allow_population_by_field_name = True - extra = Extra.forbid + model_config = ConfigDict( + alias_generator=_underscore_as_minus, populate_by_name=True, extra="forbid" + ) @classmethod def from_yaml(cls, path: Path) -> "RuntimeConfig": with path.open() as fh: data = yaml_safe_load(fh) - return cls.parse_obj(data) + return cls.model_validate(data) @classmethod def from_labels_annotations(cls, labels: dict[str, str]) -> "RuntimeConfig": data = from_labels(labels, prefix_key=OSPARC_LABEL_PREFIXES[1]) - return cls.parse_obj(data) + return cls.model_validate(data) def to_labels_annotations(self) -> dict[str, str]: labels: dict[str, str] = to_labels( - self.dict(exclude_unset=True, by_alias=True, exclude_none=True), + self.model_dump(exclude_unset=True, by_alias=True, exclude_none=True), prefix_key=OSPARC_LABEL_PREFIXES[1], ) return labels diff --git a/packages/service-integration/src/service_integration/settings.py b/packages/service-integration/src/service_integration/settings.py index 70c971c8db9..b2aa86f87a9 100644 --- a/packages/service-integration/src/service_integration/settings.py +++ b/packages/service-integration/src/service_integration/settings.py @@ -1,4 +1,5 @@ -from pydantic import BaseModel, BaseSettings, Field, SecretStr +from pydantic import BaseModel, Field, SecretStr +from pydantic_settings import BaseSettings, SettingsConfigDict class Registry(BaseModel): @@ -26,9 +27,7 @@ class AppSettings(BaseSettings): COMPOSE_VERSION: str = Field( "3.7", description="version of the docker-compose spec" ) - - class Config: - env_file_encoding = "utf-8" + model_config = SettingsConfigDict(env_file_encoding="utf-8") # TODO: load from ~/.osparc/service-integration.json or env file # TODO: add access to secrets diff --git a/packages/service-integration/src/service_integration/versioning.py b/packages/service-integration/src/service_integration/versioning.py index 3ed56868e50..3b87ce32791 100644 --- a/packages/service-integration/src/service_integration/versioning.py +++ b/packages/service-integration/src/service_integration/versioning.py @@ -1,15 +1,13 @@ -import re from datetime import datetime -from re import Pattern -from typing import Any, ClassVar +from typing import Annotated from models_library.basic_regex import SEMANTIC_VERSION_RE_W_CAPTURE_GROUPS from packaging.version import Version -from pydantic import BaseModel, ConstrainedStr, Field +from pydantic import BaseModel, ConfigDict, Field, StringConstraints - -class SemanticVersionStr(ConstrainedStr): - regex: Pattern[str] | None = re.compile(SEMANTIC_VERSION_RE_W_CAPTURE_GROUPS) +SemanticVersionStr = Annotated[ + str, StringConstraints(pattern=SEMANTIC_VERSION_RE_W_CAPTURE_GROUPS) +] def bump_version_string(current_version: str, bump: str) -> str: @@ -52,8 +50,8 @@ class ExecutableVersionInfo(BaseModel): version: SemanticVersionStr released: datetime - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "example": { "display_name": "SEMCAD X", "display_version": "Matterhorn Student Edition 1", @@ -63,6 +61,7 @@ class Config: "released": "2021-11-19T14:58:45.900979", } } + ) class 
ServiceVersionInfo(BaseModel): @@ -72,11 +71,12 @@ class ServiceVersionInfo(BaseModel): ) released: datetime = Field(..., description="Publication/release date") - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "example": { "version": "1.0.0", # e.g. first time released as an osparc "integration_version": "2.1.0", "released": "2021-11-19T14:58:45.900979", } } + ) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 3316f4276ed..3e40b2694d4 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -32,11 +32,6 @@ urllib3>=1.26.5 # https://github.com/advisories/GH # Breaking changes ----------------------------------------------------------------------------------------- # - -# SEE https://github.com/ITISFoundation/osparc-simcore/issues/4481 -fastapi<0.100.0 -pydantic<2.0 - # with new released version 1.0.0 (https://github.com/aio-libs/aiozipkin/releases). # TODO: includes async features https://docs.sqlalchemy.org/en/14/changelog/migration_20.html sqlalchemy<2.0 From 3f4901e0f5949cd84f3fe1a0ef41e7d4e50d870b Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 16 Sep 2024 14:55:28 +0200 Subject: [PATCH 009/280] upgrade settings-library --- packages/settings-library/requirements/_base.in | 4 ++-- packages/settings-library/requirements/_base.txt | 5 +++++ packages/settings-library/requirements/_test.txt | 4 +++- packages/settings-library/src/settings_library/base.py | 6 ++---- 4 files changed, 12 insertions(+), 7 deletions(-) diff --git a/packages/settings-library/requirements/_base.in b/packages/settings-library/requirements/_base.in index ec1d848cc85..01734738bcb 100644 --- a/packages/settings-library/requirements/_base.in +++ b/packages/settings-library/requirements/_base.in @@ -3,8 +3,8 @@ # --constraint ../../../requirements/constraints.txt -pydantic>=1.9 - +pydantic +pydantic-settings # extra rich diff --git a/packages/settings-library/requirements/_base.txt b/packages/settings-library/requirements/_base.txt index 422fcb01a80..e9d934b2425 100644 --- a/packages/settings-library/requirements/_base.txt +++ b/packages/settings-library/requirements/_base.txt @@ -10,10 +10,15 @@ pydantic==2.9.1 # via # -c requirements/../../../requirements/constraints.txt # -r requirements/_base.in + # pydantic-settings pydantic-core==2.23.3 # via pydantic +pydantic-settings==2.5.2 + # via -r requirements/_base.in pygments==2.18.0 # via rich +python-dotenv==1.0.1 + # via pydantic-settings rich==13.7.1 # via # -r requirements/_base.in diff --git a/packages/settings-library/requirements/_test.txt b/packages/settings-library/requirements/_test.txt index 9a1422f4bc4..56bf15d9c2d 100644 --- a/packages/settings-library/requirements/_test.txt +++ b/packages/settings-library/requirements/_test.txt @@ -32,7 +32,9 @@ pytest-sugar==1.0.0 python-dateutil==2.9.0.post0 # via faker python-dotenv==1.0.1 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in six==1.16.0 # via python-dateutil termcolor==2.4.0 diff --git a/packages/settings-library/src/settings_library/base.py b/packages/settings-library/src/settings_library/base.py index 609ed89d6a4..60aeb076954 100644 --- a/packages/settings-library/src/settings_library/base.py +++ b/packages/settings-library/src/settings_library/base.py @@ -3,7 +3,7 @@ from functools import cached_property from typing import Final, get_args, get_origin -from pydantic import BaseConfig, ConfigError, Extra, ValidationError, validator 
+from pydantic import BaseConfig, ConfigError, Extra, ValidationError, field_validator from pydantic.error_wrappers import ErrorList, ErrorWrapper from pydantic.fields import ModelField, Undefined from pydantic.typing import is_literal_type @@ -64,9 +64,7 @@ class BaseCustomSettings(BaseSettings): SEE tests for details. """ - # TODO[pydantic]: We couldn't refactor the `validator`, please replace it by `field_validator` manually. - # Check https://docs.pydantic.dev/dev-v2/migration/#changes-to-validators for more information. - @validator("*", pre=True) + @field_validator("*", mode="before") @classmethod def parse_none(cls, v, field: ModelField): # WARNING: In nullable fields, envs equal to null or none are parsed as None !! From 5e42c7c9093a02493b17bdbb3b9fad89ba032cbb Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 16 Sep 2024 15:47:20 +0200 Subject: [PATCH 010/280] bump-pydantic --- services/api-server/requirements/_base.txt | 331 ++++++++++++++++-- services/api-server/requirements/_test.txt | 15 - services/api-server/requirements/_tools.txt | 10 - .../core/settings.py | 29 +- .../models/api_resources.py | 16 +- .../models/pagination.py | 18 +- .../models/schemas/errors.py | 9 +- .../models/schemas/files.py | 25 +- .../models/schemas/jobs.py | 60 ++-- .../models/schemas/meta.py | 10 +- .../models/schemas/profiles.py | 14 +- .../models/schemas/solvers.py | 34 +- .../services/catalog.py | 6 +- .../services/director_v2.py | 16 +- 14 files changed, 439 insertions(+), 154 deletions(-) diff --git a/services/api-server/requirements/_base.txt b/services/api-server/requirements/_base.txt index a132d7fb2a6..cdf22802085 100644 --- a/services/api-server/requirements/_base.txt +++ b/services/api-server/requirements/_base.txt @@ -1,16 +1,55 @@ aio-pika==9.4.1 + # via + # -r requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in aiocache==0.12.2 + # via -r requirements/../../../packages/simcore-sdk/requirements/_base.in aiodebug==2.3.0 + # via + # -r requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in aiodocker==0.21.0 + # via + # -r requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in aiofiles==23.2.1 + # via + # -r requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/_base.in + # -r requirements/_base.in aiohttp==3.9.3 - # via aiodocker + # via + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/simcore-sdk/requirements/_base.in + # aiodocker aiopg==1.4.0 + # via + # -r requirements/../../../packages/simcore-sdk/requirements/_base.in + # -r requirements/_base.in aiormq==6.8.0 # via aio-pika aiosignal==1.3.1 # via aiohttp alembic==1.13.1 + # via + # -r requirements/../../../packages/postgres-database/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in +annotated-types==0.7.0 + # via pydantic anyio==4.3.0 # via # fast-depends @@ -19,12 +58,17 @@ anyio==4.3.0 # starlette # watchfiles arrow==1.3.0 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in async-timeout==4.0.3 # via - # aiohttp # aiopg # asyncpg - # redis asyncpg==0.29.0 # via sqlalchemy attrs==23.2.0 @@ -33,6 +77,20 @@ attrs==23.2.0 # jsonschema certifi==2024.2.2 # via + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt # httpcore # httpx cffi==1.16.0 @@ -42,22 +100,46 @@ click==8.1.7 # typer # uvicorn cryptography==42.0.5 + # via + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/_base.in dnspython==2.6.1 # via email-validator email-validator==2.1.1 # via # fastapi # pydantic -exceptiongroup==1.2.0 - # via anyio fast-depends==2.4.2 # via faststream -fastapi==0.99.1 +fastapi==0.114.2 # via + # -r requirements/../../../packages/service-library/requirements/_fastapi.in + # -r requirements/_base.in # fastapi-pagination # prometheus-fastapi-instrumentator +fastapi-cli==0.0.5 + # via fastapi 
fastapi-pagination==0.12.17 + # via + # -c requirements/./constraints.txt + # -r requirements/_base.in faststream==0.5.10 + # via + # -r requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in frozenlist==1.4.1 # via # aiohttp @@ -73,7 +155,24 @@ httpcore==1.0.5 httptools==0.6.1 # via uvicorn httpx==0.27.0 - # via fastapi + # via + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/service-library/requirements/_fastapi.in + # -r requirements/_base.in + # fastapi idna==3.6 # via # anyio @@ -83,10 +182,46 @@ idna==3.6 itsdangerous==2.1.2 # via fastapi jinja2==3.1.3 - # via fastapi + # via + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # fastapi jsonschema==3.2.0 + # via + # -c requirements/./constraints.txt + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in mako==1.3.2 - # via alembic + # via + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # alembic markdown-it-py==3.0.0 # via rich markupsafe==2.1.5 @@ -100,46 +235,153 @@ multidict==6.0.5 # 
aiohttp # yarl orjson==3.10.0 - # via fastapi + # via + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/_base.in + # fastapi packaging==24.0 + # via + # -r requirements/../../../packages/simcore-sdk/requirements/_base.in + # -r requirements/_base.in pamqp==3.3.0 # via aiormq parse==1.20.2 + # via -r requirements/_base.in pint==0.23 + # via -r requirements/../../../packages/simcore-sdk/requirements/_base.in prometheus-client==0.20.0 - # via prometheus-fastapi-instrumentator + # via + # -r requirements/../../../packages/service-library/requirements/_fastapi.in + # prometheus-fastapi-instrumentator prometheus-fastapi-instrumentator==6.1.0 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in psycopg2-binary==2.9.9 # via # aiopg # sqlalchemy pycparser==2.22 # via cffi -pydantic==1.10.14 +pydantic==2.9.1 # via + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/postgres-database/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/_base.in + # -r requirements/_base.in # fast-depends # fastapi # fastapi-pagination + # pydantic-extra-types + # pydantic-settings +pydantic-core==2.23.3 + # via pydantic +pydantic-extra-types==2.9.0 + # via fastapi +pydantic-settings==2.5.2 + # via fastapi pygments==2.17.2 # via rich pyinstrument==4.6.2 + # via + # -r requirements/../../../packages/service-library/requirements/_base.in + # -r 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in pyrsistent==0.20.0 # via jsonschema python-dateutil==2.9.0.post0 # via arrow python-dotenv==1.0.1 # via - # pydantic + # pydantic-settings # uvicorn python-multipart==0.0.9 # via fastapi pyyaml==6.0.1 # via + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/_base.in # fastapi # uvicorn redis==5.0.4 + # via + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in rich==13.7.1 - # via typer + # via + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/_base.in + # typer setuptools==69.2.0 # via jsonschema shellingham==1.5.4 @@ -154,9 +396,25 @@ sniffio==1.3.1 # httpx sqlalchemy==1.4.52 # via + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/postgres-database/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in # aiopg # alembic -starlette==0.27.0 +starlette==0.38.5 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt @@ -180,9 +438,23 @@ tenacity==8.5.0 # -r requirements/../../../packages/simcore-sdk/requirements/_base.in # -r requirements/_base.in toolz==0.12.1 + # via + # -r requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in tqdm==4.66.2 + # via + # -r requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/_base.in typer==0.12.3 - # via faststream + # via + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/_base.in + # fastapi-cli + # faststream types-python-dateutil==2.9.0.20240316 # via arrow typing-extensions==4.10.0 @@ -190,18 +462,35 @@ typing-extensions==4.10.0 # aiodebug # aiodocker # alembic - # anyio # fastapi # fastapi-pagination # faststream # pint # pydantic + # pydantic-core # typer - # uvicorn ujson==5.9.0 - # via fastapi + # via + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # fastapi uvicorn==0.29.0 - # via fastapi + # via + # -r requirements/../../../packages/service-library/requirements/_fastapi.in + # fastapi + # fastapi-cli uvloop==0.19.0 # via uvicorn watchfiles==0.21.0 @@ -210,6 +499,8 @@ websockets==12.0 # via uvicorn yarl==1.9.4 # via + # -r requirements/../../../packages/postgres-database/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in # aio-pika # aiohttp # aiormq diff --git a/services/api-server/requirements/_test.txt b/services/api-server/requirements/_test.txt index 8a1ddb9caa4..2c7fec18f12 100644 --- a/services/api-server/requirements/_test.txt +++ b/services/api-server/requirements/_test.txt @@ -19,10 +19,6 @@ anyio==4.3.0 # httpx asgi-lifespan==2.1.0 # via -r requirements/_test.in -async-timeout==4.0.3 - # via - # -c requirements/_base.txt - # aiohttp attrs==23.2.0 # via # -c requirements/_base.txt @@ -91,11 +87,6 @@ ecdsa==0.19.0 # moto # python-jose # sshpubkeys -exceptiongroup==1.2.0 - # via - # -c requirements/_base.txt - # anyio - # pytest faker==27.0.0 # via -r requirements/_test.in flask==2.1.3 @@ -318,11 +309,6 @@ sqlalchemy2-stubs==0.0.2a38 # via sqlalchemy sshpubkeys==3.3.1 # via moto -tomli==2.0.1 - # via - # coverage - # mypy - # pytest types-aiofiles==24.1.0.20240626 # via -r requirements/_test.in types-awscrt==0.21.2 @@ -335,7 +321,6 @@ typing-extensions==4.10.0 # via # -c requirements/_base.txt # alembic - # anyio # boto3-stubs # mypy # sqlalchemy2-stubs diff --git a/services/api-server/requirements/_tools.txt b/services/api-server/requirements/_tools.txt index ee67b7d505f..a741d4f592a 100644 --- a/services/api-server/requirements/_tools.txt +++ b/services/api-server/requirements/_tools.txt @@ -91,22 +91,12 @@ setuptools==69.2.0 # -c requirements/_base.txt # -c requirements/_test.txt # pip-tools -tomli==2.0.1 - # via - # -c requirements/_test.txt - # black - # build - # mypy - # pip-tools - # pylint tomlkit==0.13.2 # via pylint typing-extensions==4.10.0 # via # -c requirements/_base.txt # -c requirements/_test.txt - # astroid - # black # mypy virtualenv==20.26.3 # via pre-commit diff --git a/services/api-server/src/simcore_service_api_server/core/settings.py b/services/api-server/src/simcore_service_api_server/core/settings.py index 9cc61e1c11a..dfc97394199 100644 --- a/services/api-server/src/simcore_service_api_server/core/settings.py +++ b/services/api-server/src/simcore_service_api_server/core/settings.py @@ -1,8 +1,14 @@ from functools import cached_property from models_library.basic_types import BootModeEnum, LogLevel -from pydantic import Field, NonNegativeInt, PositiveInt, SecretStr -from pydantic.class_validators import validator +from pydantic import ( + AliasChoices, + Field, + NonNegativeInt, + PositiveInt, + SecretStr, + field_validator, +) from settings_library.base import BaseCustomSettings from settings_library.catalog import CatalogSettings from settings_library.director_v2 import DirectorV2Settings @@ -24,11 +30,14 @@ class WebServerSettings(WebServerBaseSettings, 
MixinSessionSettings): description="Secret key to encrypt cookies. " 'TIP: python3 -c "from cryptography.fernet import *; print(Fernet.generate_key())"', min_length=44, - env=["SESSION_SECRET_KEY", "WEBSERVER_SESSION_SECRET_KEY"], + validation_alias=AliasChoices( + "SESSION_SECRET_KEY", "WEBSERVER_SESSION_SECRET_KEY" + ), ) WEBSERVER_SESSION_NAME: str = DEFAULT_SESSION_COOKIE_NAME - @validator("WEBSERVER_SESSION_SECRET_KEY") + @field_validator("WEBSERVER_SESSION_SECRET_KEY") + @classmethod @classmethod def check_valid_fernet_key(cls, v): return cls.do_check_valid_fernet_key(v) @@ -41,21 +50,25 @@ class BasicSettings(BaseCustomSettings, MixinLoggingSettings): # DEVELOPMENT API_SERVER_DEV_FEATURES_ENABLED: bool = Field( default=False, - env=["API_SERVER_DEV_FEATURES_ENABLED", "FAKE_API_SERVER_ENABLED"], + validation_alias=AliasChoices( + "API_SERVER_DEV_FEATURES_ENABLED", "FAKE_API_SERVER_ENABLED" + ), ) # LOGGING LOG_LEVEL: LogLevel = Field( default=LogLevel.INFO.value, - env=["API_SERVER_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"], + validation_alias=AliasChoices("API_SERVER_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"), ) API_SERVER_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field( default=False, - env=["API_SERVER_LOG_FORMAT_LOCAL_DEV_ENABLED", "LOG_FORMAT_LOCAL_DEV_ENABLED"], + validation_alias=AliasChoices( + "API_SERVER_LOG_FORMAT_LOCAL_DEV_ENABLED", "LOG_FORMAT_LOCAL_DEV_ENABLED" + ), description="Enables local development log format. WARNING: make sure it is disabled if you want to have structured logs!", ) - @validator("LOG_LEVEL", pre=True) + @field_validator("LOG_LEVEL", mode="before") @classmethod def _validate_loglevel(cls, value) -> str: log_level: str = cls.validate_log_level(value) diff --git a/services/api-server/src/simcore_service_api_server/models/api_resources.py b/services/api-server/src/simcore_service_api_server/models/api_resources.py index 3f64fd323c0..88f57e41f5e 100644 --- a/services/api-server/src/simcore_service_api_server/models/api_resources.py +++ b/services/api-server/src/simcore_service_api_server/models/api_resources.py @@ -2,7 +2,7 @@ import urllib.parse from typing import Any -from pydantic import BaseModel, Field +from pydantic import BaseModel, ConfigDict, Field from pydantic.types import ConstrainedStr # RESOURCE NAMES https://cloud.google.com/apis/design/resource_names @@ -32,9 +32,7 @@ class RelativeResourceName(ConstrainedStr): regex = re.compile(_RELATIVE_RESOURCE_NAME_RE) - - class Config: - frozen = True + model_config = ConfigDict(frozen=True) # NOTE: we quote parts in a single resource_name and unquote when split @@ -67,10 +65,12 @@ def split_resource_name(resource_name: RelativeResourceName) -> list[str]: # Resource IDs must be clearly documented whether they are assigned by the client, the server, or either # class BaseResource(BaseModel): - name: RelativeResourceName = Field(None, example="solvers/isolve/releases/1.2.3") - id: Any = Field(None, description="Resource ID", example="1.2.3") # noqa: A003 + name: RelativeResourceName = Field(None, examples=["solvers/isolve/releases/1.2.3"]) + id: Any = Field(None, description="Resource ID", examples=["1.2.3"]) # noqa: A003 class BaseCollection(BaseModel): - name: RelativeResourceName = Field(None, example="solvers/isolve/releases") - id: Any = Field(None, description="Collection ID", example="releases") # noqa: A003 + name: RelativeResourceName = Field(None, examples=["solvers/isolve/releases"]) + id: Any = Field( + None, description="Collection ID", examples=["releases"] + ) # noqa: A003 diff --git 
a/services/api-server/src/simcore_service_api_server/models/pagination.py b/services/api-server/src/simcore_service_api_server/models/pagination.py index 44013b068e5..bbe99af5bbf 100644 --- a/services/api-server/src/simcore_service_api_server/models/pagination.py +++ b/services/api-server/src/simcore_service_api_server/models/pagination.py @@ -7,7 +7,7 @@ """ from collections.abc import Sequence -from typing import Any, ClassVar, Generic, TypeAlias, TypeVar +from typing import Generic, TypeAlias, TypeVar from fastapi_pagination.limit_offset import LimitOffsetParams from fastapi_pagination.links.limit_offset import ( @@ -18,8 +18,7 @@ MAXIMUM_NUMBER_OF_ITEMS_PER_PAGE, ) from models_library.utils.pydantic_tools_extension import FieldNotRequired -from pydantic import Field, NonNegativeInt, validator -from pydantic.generics import GenericModel +from pydantic import BaseModel, ConfigDict, Field, NonNegativeInt T = TypeVar("T") @@ -35,7 +34,7 @@ PaginationParams: TypeAlias = LimitOffsetParams -class OnePage(GenericModel, Generic[T]): +class OnePage(BaseModel, Generic[T]): """ A single page is used to envelope a small sequence that does not require pagination @@ -47,7 +46,7 @@ class OnePage(GenericModel, Generic[T]): items: Sequence[T] total: NonNegativeInt = FieldNotRequired() - @validator("total", pre=True) + @field_validator("total", mode="before") @classmethod def check_total(cls, v, values): items = values["items"] @@ -60,9 +59,9 @@ def check_total(cls, v, values): return v - class Config: - frozen = True - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + frozen=True, + json_schema_extra={ "examples": [ { "total": 1, @@ -72,7 +71,8 @@ class Config: "items": ["one"], }, ], - } + }, + ) __all__: tuple[str, ...] = ( diff --git a/services/api-server/src/simcore_service_api_server/models/schemas/errors.py b/services/api-server/src/simcore_service_api_server/models/schemas/errors.py index 306ac959058..3243f5e44b9 100644 --- a/services/api-server/src/simcore_service_api_server/models/schemas/errors.py +++ b/services/api-server/src/simcore_service_api_server/models/schemas/errors.py @@ -1,6 +1,6 @@ -from typing import Any, ClassVar +from typing import Any -from pydantic import BaseModel +from pydantic import BaseModel, ConfigDict class ErrorGet(BaseModel): @@ -11,8 +11,8 @@ class ErrorGet(BaseModel): # - https://github.com/ITISFoundation/osparc-simcore/issues/2446 errors: list[Any] - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "example": { "errors": [ "some error message", @@ -20,3 +20,4 @@ class Config: ] } } + ) diff --git a/services/api-server/src/simcore_service_api_server/models/schemas/files.py b/services/api-server/src/simcore_service_api_server/models/schemas/files.py index eece67dfa59..9ac6c252899 100644 --- a/services/api-server/src/simcore_service_api_server/models/schemas/files.py +++ b/services/api-server/src/simcore_service_api_server/models/schemas/files.py @@ -1,6 +1,6 @@ from mimetypes import guess_type from pathlib import Path -from typing import Any, ClassVar +from typing import Annotated from urllib.parse import quote as _quote from urllib.parse import unquote as _unquote from uuid import UUID, uuid3 @@ -14,18 +14,18 @@ AnyUrl, BaseModel, ByteSize, - ConstrainedStr, + ConfigDict, Field, + StringConstraints, + field_validator, parse_obj_as, - validator, ) from servicelib.file_utils import create_sha256_checksum _NAMESPACE_FILEID_KEY = UUID("aa154444-d22d-4290-bb15-df37dba87865") -class 
FileName(ConstrainedStr): - strip_whitespace = True +FileName = Annotated[str, StringConstraints(strip_whitespace=True)] class ClientFile(BaseModel): @@ -46,7 +46,9 @@ class File(BaseModel): filename: str = Field(..., description="Name of the file with extension") content_type: str | None = Field( - default=None, description="Guess of type content [EXPERIMENTAL]" + default=None, + description="Guess of type content [EXPERIMENTAL]", + validate_default=True, ) sha256_checksum: SHA256Str | None = Field( default=None, @@ -55,9 +57,9 @@ class File(BaseModel): ) e_tag: ETag | None = Field(default=None, description="S3 entity tag") - class Config: - allow_population_by_field_name = True - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + populate_by_name=True, + json_schema_extra={ "examples": [ # complete { @@ -72,9 +74,10 @@ class Config: "filename": "whitepaper.pdf", }, ] - } + }, + ) - @validator("content_type", always=True, pre=True) + @field_validator("content_type", mode="before") @classmethod def guess_content_type(cls, v, values): if v is None: diff --git a/services/api-server/src/simcore_service_api_server/models/schemas/jobs.py b/services/api-server/src/simcore_service_api_server/models/schemas/jobs.py index a190d4e182b..93c2982e136 100644 --- a/services/api-server/src/simcore_service_api_server/models/schemas/jobs.py +++ b/services/api-server/src/simcore_service_api_server/models/schemas/jobs.py @@ -1,7 +1,7 @@ import datetime import hashlib import logging -from typing import Any, ClassVar, TypeAlias +from typing import Annotated, TypeAlias from uuid import UUID, uuid4 from models_library.projects import ProjectID @@ -9,24 +9,22 @@ from models_library.projects_state import RunningState from pydantic import ( BaseModel, - ConstrainedInt, - Extra, + ConfigDict, Field, HttpUrl, PositiveInt, StrictBool, StrictFloat, StrictInt, + TypeAdapter, ValidationError, - parse_obj_as, - validator, + field_validator, ) from servicelib.logging_utils import LogLevelInt, LogMessageStr from starlette.datastructures import Headers from ...models.schemas.files import File from ...models.schemas.solvers import Solver -from .._utils_pydantic import BaseConfig from ..api_resources import ( RelativeResourceName, compose_resource_name, @@ -69,10 +67,9 @@ class JobInputs(BaseModel): # TODO: gibt es platz fuer metadata? - class Config(BaseConfig): - frozen = True - allow_mutation = False - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + frozen=True, + json_schema_extra={ "example": { "values": { "x": 4.33, @@ -85,7 +82,8 @@ class Config(BaseConfig): }, } } - } + }, + ) def compute_checksum(self): return _compute_keyword_arguments_checksum(self.values) @@ -102,10 +100,9 @@ class JobOutputs(BaseModel): # TODO: an error might have occurred at the level of the job, i.e. affects all outputs, or only # on one specific output. 
- class Config(BaseConfig): - frozen = True - allow_mutation = False - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + frozen=True, + json_schema_extra={ "example": { "job_id": "99d9ac65-9f10-4e2f-a433-b5e412bb037b", "results": { @@ -119,7 +116,8 @@ class Config(BaseConfig): }, }, } - } + }, + ) def compute_results_checksum(self): return _compute_keyword_arguments_checksum(self.results) @@ -179,8 +177,8 @@ class Job(BaseModel): ..., description="Link to the job outputs (sub-collection)" ) - class Config(BaseConfig): - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "example": { "id": "f622946d-fd29-35b9-a193-abdd1095167c", "name": "solvers/isolve/releases/1.3.4/jobs/f622946d-fd29-35b9-a193-abdd1095167c", @@ -192,8 +190,9 @@ class Config(BaseConfig): "outputs_url": "https://api.osparc.io/v0/solvers/isolve/releases/1.3.4/jobs/f622946d-fd29-35b9-a193-abdd1095167c/outputs", } } + ) - @validator("name", pre=True) + @field_validator("name", mode="before") @classmethod def check_name(cls, v, values): _id = str(values["id"]) @@ -247,9 +246,7 @@ def resource_name(self) -> str: return self.name -class PercentageInt(ConstrainedInt): - ge = 0 - le = 100 +PercentageInt = Annotated[int, Field(ge=0, le=100)] class JobStatus(BaseModel): @@ -259,7 +256,7 @@ class JobStatus(BaseModel): job_id: JobID state: RunningState - progress: PercentageInt = Field(default=PercentageInt(0)) + progress: PercentageInt = Field(default=0) # Timestamps on states submitted_at: datetime.datetime = Field( @@ -274,8 +271,8 @@ class JobStatus(BaseModel): description="Timestamp at which the solver finished or killed execution or None if the event did not occur", ) - class Config(BaseConfig): - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "example": { "job_id": "145beae4-a3a8-4fde-adbb-4e8257c2c083", "state": RunningState.STARTED, @@ -285,31 +282,31 @@ class Config(BaseConfig): "stopped_at": None, } } + ) class JobPricingSpecification(BaseModel): pricing_plan: PositiveInt = Field(..., alias="x-pricing-plan") pricing_unit: PositiveInt = Field(..., alias="x-pricing-unit") - class Config: - extra = Extra.ignore + model_config = ConfigDict(extra="ignore") @classmethod def create_from_headers(cls, headers: Headers) -> "JobPricingSpecification | None": try: - return parse_obj_as(JobPricingSpecification, headers) + return TypeAdapter(cls).validate_python(headers) except ValidationError: return None class JobLog(BaseModel): job_id: ProjectID - node_id: NodeID | None + node_id: NodeID | None = None log_level: LogLevelInt messages: list[LogMessageStr] - class Config(BaseConfig): - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "example": { "job_id": "145beae4-a3a8-4fde-adbb-4e8257c2c083", "node_id": "3742215e-6756-48d2-8b73-4d043065309f", @@ -317,3 +314,4 @@ class Config(BaseConfig): "messages": ["PROGRESS: 5/10"], } } + ) diff --git a/services/api-server/src/simcore_service_api_server/models/schemas/meta.py b/services/api-server/src/simcore_service_api_server/models/schemas/meta.py index ea358f1433f..64f80a45b9b 100644 --- a/services/api-server/src/simcore_service_api_server/models/schemas/meta.py +++ b/services/api-server/src/simcore_service_api_server/models/schemas/meta.py @@ -1,15 +1,12 @@ -from typing import ClassVar - from models_library.api_schemas__common.meta import BaseMeta -from pydantic import AnyHttpUrl +from pydantic import AnyHttpUrl, ConfigDict class 
Meta(BaseMeta): docs_url: AnyHttpUrl docs_dev_url: AnyHttpUrl - - class Config: - schema_extra: ClassVar = { + model_config = ConfigDict( + json_schema_extra={ "example": { "name": "simcore_service_foo", "version": "2.4.45", @@ -18,3 +15,4 @@ class Config: "docs_dev_url": "https://api.osparc.io/dev/doc", } } + ) diff --git a/services/api-server/src/simcore_service_api_server/models/schemas/profiles.py b/services/api-server/src/simcore_service_api_server/models/schemas/profiles.py index 8f86f2e693f..76b283aa4a9 100644 --- a/services/api-server/src/simcore_service_api_server/models/schemas/profiles.py +++ b/services/api-server/src/simcore_service_api_server/models/schemas/profiles.py @@ -1,17 +1,16 @@ from enum import auto -from typing import Any, ClassVar from models_library.emails import LowerCaseEmailStr from models_library.users import FirstNameStr, LastNameStr, UserID from models_library.utils.enums import StrAutoEnum -from pydantic import BaseModel, Field, validator +from pydantic import BaseModel, ConfigDict, Field, field_validator from ..domain.groups import Groups class ProfileCommon(BaseModel): - first_name: FirstNameStr | None = Field(None, example="James") - last_name: LastNameStr | None = Field(None, example="Maxwell") + first_name: FirstNameStr | None = Field(None, examples=["James"]) + last_name: LastNameStr | None = Field(None, examples=["Maxwell"]) class ProfileUpdate(ProfileCommon): @@ -39,15 +38,15 @@ class Profile(ProfileCommon): max_length=40, ) - @validator("role", pre=True) + @field_validator("role", mode="before") @classmethod def enforce_role_upper(cls, v): if v: return v.upper() return v - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "example": { "id": "20", "first_name": "James", @@ -70,3 +69,4 @@ class Config: "gravatar_id": "9a8930a5b20d7048e37740bac5c1ca4f", } } + ) diff --git a/services/api-server/src/simcore_service_api_server/models/schemas/solvers.py b/services/api-server/src/simcore_service_api_server/models/schemas/solvers.py index a99017852a5..f7531346e84 100644 --- a/services/api-server/src/simcore_service_api_server/models/schemas/solvers.py +++ b/services/api-server/src/simcore_service_api_server/models/schemas/solvers.py @@ -1,12 +1,12 @@ import urllib.parse -from typing import Any, ClassVar, Literal +from typing import Annotated, Any, Literal import packaging.version from models_library.basic_regex import PUBLIC_VARIABLE_NAME_RE from models_library.services import ServiceMetaDataPublished from models_library.services_regex import COMPUTATIONAL_SERVICE_KEY_RE from packaging.version import Version -from pydantic import BaseModel, ConstrainedStr, Extra, Field, HttpUrl +from pydantic import BaseModel, ConfigDict, Field, HttpUrl, StringConstraints from ..api_resources import compose_resource_name from ..basic_types import VersionStr @@ -30,9 +30,9 @@ SOLVER_RESOURCE_NAME_RE = r"^solvers/([^\s/]+)/releases/([\d\.]+)$" -class SolverKeyId(ConstrainedStr): - strip_whitespace = True - regex = COMPUTATIONAL_SERVICE_KEY_RE +SolverKeyId = Annotated[ + str, StringConstraints(strip_whitespace=True, pattern=COMPUTATIONAL_SERVICE_KEY_RE) +] class Solver(BaseModel): @@ -46,17 +46,16 @@ class Solver(BaseModel): # Human readables Identifiers title: str = Field(..., description="Human readable name") - description: str | None + description: str | None = None maintainer: str # TODO: consider released: Optional[datetime] required? 
# TODO: consider version_aliases: list[str] = [] # remaining tags # Get links to other resources url: HttpUrl | None = Field(..., description="Link to get this resource") - - class Config: - extra = Extra.ignore - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + extra="ignore", + json_schema_extra={ "example": { "id": "simcore/services/comp/isolve", "version": "2.1.1", @@ -65,7 +64,8 @@ class Config: "maintainer": "info@itis.swiss", "url": "https://api.osparc.io/v0/solvers/simcore%2Fservices%2Fcomp%2Fisolve/releases/2.1.1", } - } + }, + ) @classmethod def create_from_image(cls, image_meta: ServiceMetaDataPublished) -> "Solver": @@ -114,7 +114,7 @@ class SolverPort(BaseModel): key: str = Field( ..., description="port identifier name", - regex=PUBLIC_VARIABLE_NAME_RE, + pattern=PUBLIC_VARIABLE_NAME_RE, title="Key name", ) kind: PortKindStr @@ -122,10 +122,9 @@ class SolverPort(BaseModel): None, description="jsonschema for the port's value. SEE https://json-schema.org", ) - - class Config: - extra = Extra.ignore - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + extra="ignore", + json_schema_extra={ "example": { "key": "input_2", "kind": "input", @@ -137,4 +136,5 @@ class Config: "maximum": 5, }, } - } + }, + ) diff --git a/services/api-server/src/simcore_service_api_server/services/catalog.py b/services/api-server/src/simcore_service_api_server/services/catalog.py index 56a7d648790..43823cf9d36 100644 --- a/services/api-server/src/simcore_service_api_server/services/catalog.py +++ b/services/api-server/src/simcore_service_api_server/services/catalog.py @@ -9,7 +9,7 @@ from fastapi import FastAPI, status from models_library.emails import LowerCaseEmailStr from models_library.services import ServiceMetaDataPublished, ServiceType -from pydantic import Extra, ValidationError, parse_obj_as, parse_raw_as +from pydantic import ConfigDict, ValidationError, parse_obj_as, parse_raw_as from settings_library.catalog import CatalogSettings from simcore_service_api_server.exceptions.backend_errors import ( ListSolversOrStudiesError, @@ -43,9 +43,7 @@ class TruncatedCatalogServiceOut(ServiceMetaDataPublished): """ owner: LowerCaseEmailStr | None - - class Config: - extra = Extra.ignore + model_config = ConfigDict(extra="ignore") # Converters def to_solver(self) -> Solver: diff --git a/services/api-server/src/simcore_service_api_server/services/director_v2.py b/services/api-server/src/simcore_service_api_server/services/director_v2.py index ff31490b072..d61a74248f9 100644 --- a/services/api-server/src/simcore_service_api_server/services/director_v2.py +++ b/services/api-server/src/simcore_service_api_server/services/director_v2.py @@ -1,6 +1,5 @@ import logging from functools import partial -from typing import Any, ClassVar from uuid import UUID from fastapi import FastAPI @@ -8,7 +7,15 @@ from models_library.projects_nodes_io import NodeID from models_library.projects_pipeline import ComputationTask from models_library.projects_state import RunningState -from pydantic import AnyHttpUrl, AnyUrl, BaseModel, Field, PositiveInt, parse_raw_as +from pydantic import ( + AnyHttpUrl, + AnyUrl, + BaseModel, + ConfigDict, + Field, + PositiveInt, + parse_raw_as, +) from simcore_service_api_server.exceptions.backend_errors import ( JobNotFoundError, LogFileNotFoundError, @@ -43,8 +50,8 @@ def guess_progress(self) -> PercentageInt: return PercentageInt(100) return PercentageInt(0) - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + 
json_schema_extra={ "examples": [ { **ComputationTask.Config.schema_extra["examples"][0], @@ -52,6 +59,7 @@ class Config: } ] } + ) class TaskLogFileGet(BaseModel): From a0f289004083102ec59ff46a3e274d2d5a05f9be Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 17 Sep 2024 14:22:56 +0200 Subject: [PATCH 011/280] add type hint --- packages/aws-library/src/aws_library/ec2/_models.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/aws-library/src/aws_library/ec2/_models.py b/packages/aws-library/src/aws_library/ec2/_models.py index 6df4e19b70c..a43d36bc014 100644 --- a/packages/aws-library/src/aws_library/ec2/_models.py +++ b/packages/aws-library/src/aws_library/ec2/_models.py @@ -68,7 +68,7 @@ class EC2InstanceType: # see [https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/Using_Tags.html#tag-restrictions] -AWSTagKey = Annotated[ +AWSTagKey: TypeAlias = Annotated[ str, StringConstraints( min_length=1, @@ -79,7 +79,7 @@ class EC2InstanceType: # see [https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/Using_Tags.html#tag-restrictions] # quotes []{} were added as it allows to json encode. it seems to be accepted as a value -AWSTagValue = Annotated[ +AWSTagValue: TypeAlias = Annotated[ str, StringConstraints( min_length=0, From e1ac2c663d574af5db0e9428c8c2e5099b3c2fec Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 17 Sep 2024 14:41:13 +0200 Subject: [PATCH 012/280] continue upgrading --- .../container_tasks/protocol.py | 4 +-- .../utils_folders.py | 6 ++--- .../utils_projects_metadata.py | 14 +++++----- .../src/service_integration/osparc_config.py | 6 +++-- .../src/service_integration/settings.py | 4 ++- .../src/service_integration/versioning.py | 4 +-- .../servicelib/aiohttp/requests_validation.py | 16 +++++++----- .../src/servicelib/docker_utils.py | 12 ++++++--- .../src/servicelib/error_codes.py | 8 +++--- .../servicelib/long_running_tasks/_models.py | 4 ++- .../tests/aiohttp/test_requests_validation.py | 26 +++++++++++-------- .../src/simcore_sdk/node_ports_v2/links.py | 4 ++- .../simcore_sdk/node_ports_v2/nodeports_v2.py | 6 +++-- .../test_node_ports_common_filemanager.py | 4 +-- .../unit/test_node_ports_v2_port_mapping.py | 2 +- 15 files changed, 70 insertions(+), 50 deletions(-) diff --git a/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/protocol.py b/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/protocol.py index d64cc5f5fd3..3ad8fafdd2f 100644 --- a/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/protocol.py +++ b/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/protocol.py @@ -25,8 +25,8 @@ class TaskOwner(BaseModel): project_id: ProjectID node_id: NodeID - parent_project_id: ProjectID | None = None - parent_node_id: NodeID | None = None + parent_project_id: ProjectID | None + parent_node_id: NodeID | None @property def has_parent(self) -> bool: diff --git a/packages/postgres-database/src/simcore_postgres_database/utils_folders.py b/packages/postgres-database/src/simcore_postgres_database/utils_folders.py index 31276e404b3..cdcab9c9551 100644 --- a/packages/postgres-database/src/simcore_postgres_database/utils_folders.py +++ b/packages/postgres-database/src/simcore_postgres_database/utils_folders.py @@ -295,7 +295,7 @@ def _get_filter_for_enabled_permissions( ### -FolderName = Annotated[ +FolderName: TypeAlias = Annotated[ str, StringConstraints( min_length=1, @@ -324,8 +324,8 @@ class FolderEntry(BaseModel): 
class _ResolvedAccessRights(BaseModel): folder_id: _FolderID gid: _GroupID - traversal_parent_id: _FolderID | None = None - original_parent_id: _FolderID | None = None + traversal_parent_id: _FolderID | None + original_parent_id: _FolderID | None read: bool write: bool delete: bool diff --git a/packages/postgres-database/src/simcore_postgres_database/utils_projects_metadata.py b/packages/postgres-database/src/simcore_postgres_database/utils_projects_metadata.py index 480cae0fb19..efda444a68f 100644 --- a/packages/postgres-database/src/simcore_postgres_database/utils_projects_metadata.py +++ b/packages/postgres-database/src/simcore_postgres_database/utils_projects_metadata.py @@ -46,13 +46,13 @@ class DBProjectInvalidParentNodeError(BaseProjectsMetadataError): class ProjectMetadata(BaseModel): - custom: dict[str, Any] | None = None - created: datetime.datetime | None = None - modified: datetime.datetime | None = None - parent_project_uuid: uuid.UUID | None = None - parent_node_id: uuid.UUID | None = None - root_parent_project_uuid: uuid.UUID | None = None - root_parent_node_id: uuid.UUID | None = None + custom: dict[str, Any] | None + created: datetime.datetime | None + modified: datetime.datetime | None + parent_project_uuid: uuid.UUID | None + parent_node_id: uuid.UUID | None + root_parent_project_uuid: uuid.UUID | None + root_parent_node_id: uuid.UUID | None model_config = ConfigDict(frozen=True, from_attributes=True) diff --git a/packages/service-integration/src/service_integration/osparc_config.py b/packages/service-integration/src/service_integration/osparc_config.py index 7d08b71f107..17f9b432a32 100644 --- a/packages/service-integration/src/service_integration/osparc_config.py +++ b/packages/service-integration/src/service_integration/osparc_config.py @@ -192,7 +192,7 @@ def ensure_backwards_compatible_setting_type(cls, v): @classmethod def check_value_against_custom_types(cls, v, values): if (type_ := values.get("type_")) and type_ == "ContainerSpec": - ContainerSpec.parse_obj(v) + ContainerSpec.model_validate(v) return v @@ -246,7 +246,9 @@ def ensure_compatibility(cls, v): return v model_config = ConfigDict( - alias_generator=_underscore_as_minus, populate_by_name=True, extra="forbid" + alias_generator=_underscore_as_minus, + populate_by_name=True, + extra="forbid", ) @classmethod diff --git a/packages/service-integration/src/service_integration/settings.py b/packages/service-integration/src/service_integration/settings.py index b2aa86f87a9..f8b977cc9a4 100644 --- a/packages/service-integration/src/service_integration/settings.py +++ b/packages/service-integration/src/service_integration/settings.py @@ -27,7 +27,9 @@ class AppSettings(BaseSettings): COMPOSE_VERSION: str = Field( "3.7", description="version of the docker-compose spec" ) - model_config = SettingsConfigDict(env_file_encoding="utf-8") + model_config = SettingsConfigDict( + env_file_encoding="utf-8", + ) # TODO: load from ~/.osparc/service-integration.json or env file # TODO: add access to secrets diff --git a/packages/service-integration/src/service_integration/versioning.py b/packages/service-integration/src/service_integration/versioning.py index 3b87ce32791..0d7685a818f 100644 --- a/packages/service-integration/src/service_integration/versioning.py +++ b/packages/service-integration/src/service_integration/versioning.py @@ -1,11 +1,11 @@ from datetime import datetime -from typing import Annotated +from typing import Annotated, TypeAlias from models_library.basic_regex import SEMANTIC_VERSION_RE_W_CAPTURE_GROUPS 
from packaging.version import Version from pydantic import BaseModel, ConfigDict, Field, StringConstraints -SemanticVersionStr = Annotated[ +SemanticVersionStr: TypeAlias = Annotated[ str, StringConstraints(pattern=SEMANTIC_VERSION_RE_W_CAPTURE_GROUPS) ] diff --git a/packages/service-library/src/servicelib/aiohttp/requests_validation.py b/packages/service-library/src/servicelib/aiohttp/requests_validation.py index 0ec28708532..e5ab75d4d6d 100644 --- a/packages/service-library/src/servicelib/aiohttp/requests_validation.py +++ b/packages/service-library/src/servicelib/aiohttp/requests_validation.py @@ -14,7 +14,7 @@ from aiohttp import web from models_library.utils.json_serialization import json_dumps -from pydantic import BaseModel, ConfigDict, ValidationError, parse_obj_as +from pydantic import BaseModel, ConfigDict, TypeAdapter, ValidationError from ..mimetype_constants import MIMETYPE_APPLICATION_JSON from . import status @@ -31,7 +31,9 @@ class RequestParams(BaseModel): class StrictRequestParams(BaseModel): """Use a base class for context, path and query parameters""" - model_config = ConfigDict(extra="forbid") + model_config = ConfigDict( + extra="forbid", + ) @contextmanager @@ -167,8 +169,8 @@ def parse_request_query_parameters_as( ): data = dict(request.query) if hasattr(parameters_schema_cls, "parse_obj"): - return parameters_schema_cls.parse_obj(data) - model: ModelClass = parse_obj_as(parameters_schema_cls, data) + return parameters_schema_cls.model_validate(data) + model: ModelClass = TypeAdapter(parameters_schema_cls).validate_python(data) return model @@ -184,7 +186,7 @@ def parse_request_headers_as( use_error_v1=use_enveloped_error_v1, ): data = dict(request.headers) - return parameters_schema_cls.parse_obj(data) + return parameters_schema_cls.model_validate(data) async def parse_request_body_as( @@ -223,7 +225,7 @@ async def parse_request_body_as( # NOTE: model_schema can be 'list[T]' or 'dict[T]' which raise TypeError # with issubclass(model_schema, BaseModel) assert issubclass(model_schema_cls, BaseModel) # nosec - return model_schema_cls.parse_obj(body) # type: ignore [return-value] + return model_schema_cls.model_validate(body) # type: ignore [return-value] # used for model_schema like 'list[T]' or 'dict[T]' - return parse_obj_as(model_schema_cls, body) + return TypeAdapter(model_schema_cls).validate_python(body) diff --git a/packages/service-library/src/servicelib/docker_utils.py b/packages/service-library/src/servicelib/docker_utils.py index 8ad7a6c6c0c..dfab07283d5 100644 --- a/packages/service-library/src/servicelib/docker_utils.py +++ b/packages/service-library/src/servicelib/docker_utils.py @@ -40,7 +40,9 @@ class DockerLayerSizeV2(BaseModel): size: ByteSize digest: str model_config = ConfigDict( - frozen=True, alias_generator=snake_to_camel, populate_by_name=True + frozen=True, + alias_generator=snake_to_camel, + populate_by_name=True, ) @@ -66,7 +68,9 @@ class DockerImageMultiArchManifestsV2(BaseModel): media_type: Literal["application/vnd.oci.image.index.v1+json"] manifests: list[dict[str, Any]] model_config = ConfigDict( - frozen=True, alias_generator=snake_to_camel, populate_by_name=True + frozen=True, + alias_generator=snake_to_camel, + populate_by_name=True, ) @@ -76,7 +80,9 @@ class _DockerPullImage(BaseModel): progress_detail: ProgressDetail | None = None progress: str | None = None model_config = ConfigDict( - frozen=True, alias_generator=snake_to_camel, populate_by_name=True + frozen=True, + alias_generator=snake_to_camel, + populate_by_name=True, ) diff 
--git a/packages/service-library/src/servicelib/error_codes.py b/packages/service-library/src/servicelib/error_codes.py index 461f3ee2964..06cd14ac8bc 100644 --- a/packages/service-library/src/servicelib/error_codes.py +++ b/packages/service-library/src/servicelib/error_codes.py @@ -9,11 +9,9 @@ import re -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Annotated -from pydantic import StringConstraints -from pydantic.tools import parse_obj_as -from typing_extensions import Annotated +from pydantic import StringConstraints, TypeAdapter _LABEL = "OEC:{}" _PATTERN = r"OEC:\d+" @@ -27,7 +25,7 @@ def create_error_code(exception: BaseException) -> ErrorCodeStr: - return parse_obj_as(ErrorCodeStr, _LABEL.format(id(exception))) + return TypeAdapter(ErrorCodeStr).validate_python(_LABEL.format(id(exception))) def parse_error_code(obj) -> set[ErrorCodeStr]: diff --git a/packages/service-library/src/servicelib/long_running_tasks/_models.py b/packages/service-library/src/servicelib/long_running_tasks/_models.py index 87fceb3b3e0..fc240160b81 100644 --- a/packages/service-library/src/servicelib/long_running_tasks/_models.py +++ b/packages/service-library/src/servicelib/long_running_tasks/_models.py @@ -46,7 +46,9 @@ class TrackedTask(BaseModel): "polled by the client who created it" ), ) - model_config = ConfigDict(arbitrary_types_allowed=True) + model_config = ConfigDict( + arbitrary_types_allowed=True, + ) class ClientConfiguration(BaseModel): diff --git a/packages/service-library/tests/aiohttp/test_requests_validation.py b/packages/service-library/tests/aiohttp/test_requests_validation.py index c3320668bc9..87cbf3f4d2a 100644 --- a/packages/service-library/tests/aiohttp/test_requests_validation.py +++ b/packages/service-library/tests/aiohttp/test_requests_validation.py @@ -41,7 +41,9 @@ def create_fake(cls, faker: Faker): class MyRequestPathParams(BaseModel): project_uuid: UUID - model_config = ConfigDict(extra="forbid") + model_config = ConfigDict( + extra="forbid", + ) @classmethod def create_fake(cls, faker: Faker): @@ -64,7 +66,9 @@ def create_fake(cls, faker: Faker): class MyRequestHeadersParams(BaseModel): user_agent: str = Field(alias="X-Simcore-User-Agent") optional_header: str | None = Field(default=None, alias="X-Simcore-Optional-Header") - model_config = ConfigDict(populate_by_name=False) + model_config = ConfigDict( + populate_by_name=False, + ) @classmethod def create_fake(cls, faker: Faker): @@ -190,21 +194,21 @@ async def test_parse_request_as( r = await client.get( f"/projects/{path_params.project_uuid}", params=query_params.as_params(), - json=body.dict(), - headers=headers_params.dict(by_alias=True), + json=body.model_dump(), + headers=headers_params.model_dump(by_alias=True), ) assert r.status == status.HTTP_200_OK, f"{await r.text()}" got = await r.json() - assert got["parameters"] == jsonable_encoder(path_params.dict()) - assert got["queries"] == jsonable_encoder(query_params.dict()) - assert got["body"] == body.dict() + assert got["parameters"] == jsonable_encoder(path_params.model_dump()) + assert got["queries"] == jsonable_encoder(query_params.model_dump()) + assert got["body"] == body.model_dump() assert got["context"] == { "secret": client.app[APP_SECRET_KEY], "user_id": 42, } - assert got["headers"] == jsonable_encoder(headers_params.dict()) + assert got["headers"] == jsonable_encoder(headers_params.model_dump()) async def test_parse_request_with_invalid_path_params( @@ -248,8 +252,8 @@ async def test_parse_request_with_invalid_query_params( r = await 
client.get( f"/projects/{path_params.project_uuid}", params={}, - json=body.dict(), - headers=headers_params.dict(by_alias=True), + json=body.model_dump(), + headers=headers_params.model_dump(by_alias=True), ) assert r.status == status.HTTP_422_UNPROCESSABLE_ENTITY, f"{await r.text()}" @@ -280,7 +284,7 @@ async def test_parse_request_with_invalid_body( f"/projects/{path_params.project_uuid}", params=query_params.as_params(), json={"invalid": "body"}, - headers=headers_params.dict(by_alias=True), + headers=headers_params.model_dump(by_alias=True), ) assert r.status == status.HTTP_422_UNPROCESSABLE_ENTITY, f"{await r.text()}" diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/links.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/links.py index 1ec86b7e8db..ad94884c3b0 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/links.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/links.py @@ -22,7 +22,9 @@ class PortLink(BasePortLink): class FileLink(BaseFileLink): """allow all kind of file links""" - model_config = ConfigDict(extra="allow") + model_config = ConfigDict( + extra="allow", + ) # TODO: needs to be in sync with project_nodes.InputTypes and project_nodes.OutputTypes diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py index 02a6fd0a0a8..23b60399d7b 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py @@ -45,9 +45,11 @@ class Nodeports(BaseModel): ] auto_update: bool = False r_clone_settings: RCloneSettings | None = None - io_log_redirect_cb: LogRedirectCB | None = None + io_log_redirect_cb: LogRedirectCB | None aws_s3_cli_settings: AwsS3CliSettings | None = None - model_config = ConfigDict(arbitrary_types_allowed=True) + model_config = ConfigDict( + arbitrary_types_allowed=True, + ) def __init__(self, **data: Any): super().__init__(**data) diff --git a/packages/simcore-sdk/tests/integration/test_node_ports_common_filemanager.py b/packages/simcore-sdk/tests/integration/test_node_ports_common_filemanager.py index 81b7affc252..56f696bb46d 100644 --- a/packages/simcore-sdk/tests/integration/test_node_ports_common_filemanager.py +++ b/packages/simcore-sdk/tests/integration/test_node_ports_common_filemanager.py @@ -42,8 +42,8 @@ class _SyncSettings(BaseModel): - r_clone_settings: RCloneSettings | None = None - aws_s3_cli_settings: AwsS3CliSettings | None = None + r_clone_settings: RCloneSettings | None + aws_s3_cli_settings: AwsS3CliSettings | None @pytest.fixture( diff --git a/packages/simcore-sdk/tests/unit/test_node_ports_v2_port_mapping.py b/packages/simcore-sdk/tests/unit/test_node_ports_v2_port_mapping.py index 1af6645042a..28cb01894a1 100644 --- a/packages/simcore-sdk/tests/unit/test_node_ports_v2_port_mapping.py +++ b/packages/simcore-sdk/tests/unit/test_node_ports_v2_port_mapping.py @@ -84,7 +84,7 @@ def fake_port_meta() -> dict[str, Any]: ) port_model = ServiceInput.from_json_schema(port_schema=schema) - return port_model.dict(exclude_unset=True, by_alias=True) + return port_model.model_dump(exclude_unset=True, by_alias=True) def test_validate_port_value_against_schema(fake_port_meta: dict[str, Any]): From 7f3801a2fea38d12da3cfe1330f03d954154e582 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 18 Sep 2024 13:45:35 +0200 Subject: [PATCH 013/280] continue upgrading --- .../src/settings_library/basic_types.py | 19 +++++++------------ 
.../src/settings_library/comp_services.py | 7 +++---- .../src/settings_library/email.py | 6 +++--- .../src/settings_library/node_ports.py | 8 ++++---- .../src/settings_library/postgres.py | 9 +++++---- .../src/settings_library/twilio.py | 15 +++++---------- 6 files changed, 27 insertions(+), 37 deletions(-) diff --git a/packages/settings-library/src/settings_library/basic_types.py b/packages/settings-library/src/settings_library/basic_types.py index 277832669e1..d912b5d4e39 100644 --- a/packages/settings-library/src/settings_library/basic_types.py +++ b/packages/settings-library/src/settings_library/basic_types.py @@ -3,21 +3,17 @@ # This is a minor evil to avoid the maintenance burden that creates # an extra dependency to a larger models_library (intra-repo library) -import re from enum import Enum +from typing import Annotated, TypeAlias -from pydantic import ConstrainedInt, ConstrainedStr - +from pydantic import Field, StringConstraints # port number range -class PortInt(ConstrainedInt): - gt = 0 - lt = 65535 +PortInt: TypeAlias = Annotated[int, Field(gt=0, lt=65535)] # e.g. 'v5' -class VersionTag(ConstrainedStr): - regex = re.compile(r"^v\d$") +VersionTag: TypeAlias = Annotated[str, StringConstraints(pattern=r"^v\d$")] class LogLevel(str, Enum): @@ -55,7 +51,6 @@ class BuildTargetEnum(str, Enum): # non-empty bounded string used as identifier # e.g. "123" or "name_123" or "fa327c73-52d8-462a-9267-84eeaf0f90e3" but NOT "" -class IDStr(ConstrainedStr): - strip_whitespace = True - min_length = 1 - max_length = 50 +IDStr: TypeAlias = Annotated[ + str, StringConstraints(strip_whitespace=True, min_length=1, max_length=50) +] diff --git a/packages/settings-library/src/settings_library/comp_services.py b/packages/settings-library/src/settings_library/comp_services.py index 7167335c4be..71901e61624 100644 --- a/packages/settings-library/src/settings_library/comp_services.py +++ b/packages/settings-library/src/settings_library/comp_services.py @@ -1,5 +1,4 @@ -from pydantic import ByteSize, NonNegativeInt, field_validator -from pydantic.tools import parse_raw_as +from pydantic import ByteSize, NonNegativeInt, TypeAdapter, field_validator from settings_library.base import BaseCustomSettings from ._constants import GB @@ -10,8 +9,8 @@ class ComputationalServices(BaseCustomSettings): DEFAULT_MAX_NANO_CPUS: NonNegativeInt = _DEFAULT_MAX_NANO_CPUS_VALUE - DEFAULT_MAX_MEMORY: ByteSize = parse_raw_as( - ByteSize, f"{_DEFAULT_MAX_MEMORY_VALUE}" + DEFAULT_MAX_MEMORY: ByteSize = TypeAdapter(ByteSize).validate_python( + f"{_DEFAULT_MAX_MEMORY_VALUE}" ) DEFAULT_RUNTIME_TIMEOUT: NonNegativeInt = 0 diff --git a/packages/settings-library/src/settings_library/email.py b/packages/settings-library/src/settings_library/email.py index b15bf209405..9a8a9d026bf 100644 --- a/packages/settings-library/src/settings_library/email.py +++ b/packages/settings-library/src/settings_library/email.py @@ -1,6 +1,6 @@ from enum import Enum -from pydantic import root_validator +from pydantic import model_validator from pydantic.fields import Field from pydantic.types import SecretStr @@ -31,7 +31,7 @@ class SMTPSettings(BaseCustomSettings): SMTP_USERNAME: str | None = Field(None, min_length=1) SMTP_PASSWORD: SecretStr | None = Field(None, min_length=1) - @root_validator + @model_validator(mode="after") @classmethod def _both_credentials_must_be_set(cls, values): username = values.get("SMTP_USERNAME") @@ -43,7 +43,7 @@ def _both_credentials_must_be_set(cls, values): return values - @root_validator + @model_validator(mode="after") 
@classmethod def _enabled_tls_required_authentication(cls, values): smtp_protocol = values.get("SMTP_PROTOCOL") diff --git a/packages/settings-library/src/settings_library/node_ports.py b/packages/settings-library/src/settings_library/node_ports.py index 2a5d12f1bd7..70f90968bf5 100644 --- a/packages/settings-library/src/settings_library/node_ports.py +++ b/packages/settings-library/src/settings_library/node_ports.py @@ -1,7 +1,7 @@ from datetime import timedelta from typing import Final -from pydantic import Field, NonNegativeInt, PositiveInt, SecretStr, root_validator +from pydantic import Field, NonNegativeInt, PositiveInt, SecretStr, model_validator from .base import BaseCustomSettings from .postgres import PostgresSettings @@ -21,7 +21,7 @@ def auth_required(self) -> bool: # for details see https://github.com/ITISFoundation/osparc-issues/issues/1264 return self.STORAGE_USERNAME is not None and self.STORAGE_PASSWORD is not None - @root_validator + @model_validator(mode="after") @classmethod def _validate_auth_fields(cls, values): username = values["STORAGE_USERNAME"] @@ -33,9 +33,9 @@ def _validate_auth_fields(cls, values): class NodePortsSettings(BaseCustomSettings): - NODE_PORTS_STORAGE_AUTH: StorageAuthSettings = Field(auto_default_from_env=True) + NODE_PORTS_STORAGE_AUTH: StorageAuthSettings = Field(auto_default_from_env=True) # type: ignore[call-arg] - POSTGRES_SETTINGS: PostgresSettings = Field(auto_default_from_env=True) + POSTGRES_SETTINGS: PostgresSettings = Field(auto_default_from_env=True) # type: ignore[call-arg] NODE_PORTS_MULTIPART_UPLOAD_COMPLETION_TIMEOUT_S: NonNegativeInt = int( timedelta(minutes=5).total_seconds() diff --git a/packages/settings-library/src/settings_library/postgres.py b/packages/settings-library/src/settings_library/postgres.py index b3b122480f1..6fcefd1fb8e 100644 --- a/packages/settings-library/src/settings_library/postgres.py +++ b/packages/settings-library/src/settings_library/postgres.py @@ -1,7 +1,8 @@ import urllib.parse from functools import cached_property -from pydantic import Field, PostgresDsn, SecretStr, field_validator +from pydantic import AliasChoices, Field, PostgresDsn, SecretStr, field_validator +from pydantic_settings import SettingsConfigDict from .base import BaseCustomSettings from .basic_types import PortInt @@ -30,12 +31,12 @@ class PostgresSettings(BaseCustomSettings): POSTGRES_CLIENT_NAME: str | None = Field( default=None, description="Name of the application connecting the postgres database, will default to use the host hostname (hostname on linux)", - validation_alias=[ + validation_alias=AliasChoices( "POSTGRES_CLIENT_NAME", # This is useful when running inside a docker container, then the hostname is set each client gets a different name "HOST", "HOSTNAME", - ], + ), ) # TODO[pydantic]: We couldn't refactor the `validator`, please replace it by `field_validator` manually. 
@@ -82,7 +83,7 @@ def dsn_with_query(self) -> str: ) return dsn - model_config = ConfigDict( + model_config = SettingsConfigDict( json_schema_extra={ "examples": [ # minimal required diff --git a/packages/settings-library/src/settings_library/twilio.py b/packages/settings-library/src/settings_library/twilio.py index d6284e6b8d0..aac1d9028e0 100644 --- a/packages/settings-library/src/settings_library/twilio.py +++ b/packages/settings-library/src/settings_library/twilio.py @@ -6,27 +6,22 @@ """ -import re -from re import Pattern +from typing import Annotated, TypeAlias -from pydantic import ConfigDict, ConstrainedStr, Field, parse_obj_as +from pydantic import Field, StringConstraints, TypeAdapter from .base import BaseCustomSettings -class CountryCodeStr(ConstrainedStr): - # Based on https://countrycode.org/ - strip_whitespace: bool = True - regex: Pattern[str] | None = re.compile(r"^\d{1,4}") - model_config = ConfigDict(frozen=True) +# Based on https://countrycode.org/ +CountryCodeStr: TypeAlias = Annotated[str, StringConstraints(strip_whitespace=True, pattern=r"^\d{1,4}")] class TwilioSettings(BaseCustomSettings): TWILIO_ACCOUNT_SID: str = Field(..., description="Twilio account String Identifier") TWILIO_AUTH_TOKEN: str = Field(..., description="API tokens") TWILIO_COUNTRY_CODES_W_ALPHANUMERIC_SID_SUPPORT: list[CountryCodeStr] = Field( - default=parse_obj_as( - list[CountryCodeStr], + default=TypeAdapter(list[CountryCodeStr]).validate_python( [ "41", ], From a470e9e4c08e25dc9cba67db8d6d042916354693 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 18 Sep 2024 14:42:40 +0200 Subject: [PATCH 014/280] continue upgrading --- packages/settings-library/src/settings_library/postgres.py | 2 -- packages/settings-library/src/settings_library/utils_cli.py | 4 ++-- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/packages/settings-library/src/settings_library/postgres.py b/packages/settings-library/src/settings_library/postgres.py index 6fcefd1fb8e..c79ec1231fa 100644 --- a/packages/settings-library/src/settings_library/postgres.py +++ b/packages/settings-library/src/settings_library/postgres.py @@ -39,8 +39,6 @@ class PostgresSettings(BaseCustomSettings): ), ) - # TODO[pydantic]: We couldn't refactor the `validator`, please replace it by `field_validator` manually. - # Check https://docs.pydantic.dev/dev-v2/migration/#changes-to-validators for more information. 
@field_validator("POSTGRES_MAXSIZE") @classmethod def _check_size(cls, v, values): diff --git a/packages/settings-library/src/settings_library/utils_cli.py b/packages/settings-library/src/settings_library/utils_cli.py index 79d0e1ac145..844dffc347c 100644 --- a/packages/settings-library/src/settings_library/utils_cli.py +++ b/packages/settings-library/src/settings_library/utils_cli.py @@ -7,7 +7,7 @@ import rich import typer from pydantic import ValidationError -from pydantic.env_settings import BaseSettings +from pydantic_settings import BaseSettings from ._constants import HEADER_STR from .base import BaseCustomSettings @@ -39,7 +39,7 @@ def print_as_envfile( if isinstance(value, BaseSettings): if compact: - value = f"'{value.json(**pydantic_export_options)}'" # flat + value = f"'{value.model_dump_json(**pydantic_export_options)}'" # flat else: if verbose: typer.echo(f"\n# --- {field.name} --- ") From 8652f39f3adb22da56d33199403361110af7c75c Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 18 Sep 2024 16:28:46 +0200 Subject: [PATCH 015/280] update base requirements --- packages/service-integration/requirements/_base.txt | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/packages/service-integration/requirements/_base.txt b/packages/service-integration/requirements/_base.txt index f05f3b49316..eb2fdda677a 100644 --- a/packages/service-integration/requirements/_base.txt +++ b/packages/service-integration/requirements/_base.txt @@ -73,14 +73,22 @@ pydantic==2.9.1 # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/models-library/requirements/_base.in + # pydantic-extra-types + # pydantic-settings pydantic-core==2.23.3 # via pydantic +pydantic-extra-types==2.9.0 + # via -r requirements/../../../packages/models-library/requirements/_base.in +pydantic-settings==2.5.2 + # via -r requirements/../../../packages/models-library/requirements/_base.in pygments==2.18.0 # via rich pytest==8.3.2 # via -r requirements/_base.in python-dateutil==2.9.0.post0 # via arrow +python-dotenv==1.0.1 + # via pydantic-settings python-slugify==8.0.4 # via cookiecutter pyyaml==6.0.2 From 7b3c39e0016700a86abd090fa9991f28d76db2e4 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 18 Sep 2024 16:37:35 +0200 Subject: [PATCH 016/280] continue upgrading --- .../_compose_spec_model_autogenerated.py | 32 +++++++++---------- .../src/service_integration/osparc_config.py | 1 - .../tests/test_osparc_config.py | 3 +- 3 files changed, 17 insertions(+), 19 deletions(-) diff --git a/packages/service-integration/src/service_integration/_compose_spec_model_autogenerated.py b/packages/service-integration/src/service_integration/_compose_spec_model_autogenerated.py index b762def3b68..a0a5f295402 100644 --- a/packages/service-integration/src/service_integration/_compose_spec_model_autogenerated.py +++ b/packages/service-integration/src/service_integration/_compose_spec_model_autogenerated.py @@ -5,9 +5,9 @@ # type:ignore from enum import Enum -from typing import Any +from typing import Any, TypeAlias -from pydantic import BaseModel, ConfigDict, ConstrainedInt, Field, StringConstraints +from pydantic import BaseModel, ConfigDict, Field, RootModel, StringConstraints from typing_extensions import Annotated # MODIFICATIONS ------------------------------------------------------------------------- @@ -20,9 +20,7 @@ # UserWarning: format of 'subnet_ip_address' not understood for 
'string' - using default # port number range -class PortInt(ConstrainedInt): - gt = 0 - lt = 65535 +PortInt: TypeAlias = Annotated[int, Field(gt=0, lt=65535)] # ---------------------------------------------------------------------------------------- @@ -227,8 +225,8 @@ class GenericResource(BaseModel): discrete_resource_spec: DiscreteResourceSpec | None = None -class GenericResources(BaseModel): - __root__: list[GenericResource] +class GenericResources(RootModel): + root: list[GenericResource] class ConfigItem(BaseModel): @@ -270,12 +268,12 @@ class External3(BaseModel): name: str | None = None -class ListOfStrings(BaseModel): - __root__: list[str] +class ListOfStrings(RootModel): + root: list[str] -class ListOrDict(BaseModel): - __root__: ( +class ListOrDict(RootModel): + root: ( dict[ Annotated[str, StringConstraints(pattern=r".+")], str | float | bool | None ] @@ -297,8 +295,8 @@ class BlkioWeight(BaseModel): weight: int | None = None -class Constraints(BaseModel): - __root__: Any = None +class Constraints(RootModel): + root: Any = None class BuildItem(BaseModel): @@ -347,8 +345,8 @@ class Device(BaseModel): options: ListOrDict | None = None -class Devices(BaseModel): - __root__: list[Device] +class Devices(RootModel): + root: list[Device] class Network(BaseModel): @@ -403,8 +401,8 @@ class ComposeSpecConfig(BaseModel): template_driver: str | None = None -class StringOrList(BaseModel): - __root__: str | ListOfStrings +class StringOrList(RootModel): + root: str | ListOfStrings class Reservations(BaseModel): diff --git a/packages/service-integration/src/service_integration/osparc_config.py b/packages/service-integration/src/service_integration/osparc_config.py index 17f9b432a32..975109a87e2 100644 --- a/packages/service-integration/src/service_integration/osparc_config.py +++ b/packages/service-integration/src/service_integration/osparc_config.py @@ -43,7 +43,6 @@ field_validator, model_validator, ) -from pydantic.class_validators import field_validator from pydantic.fields import Field from pydantic.main import BaseModel diff --git a/packages/service-integration/tests/test_osparc_config.py b/packages/service-integration/tests/test_osparc_config.py index e993bc25392..b258f2fc4c8 100644 --- a/packages/service-integration/tests/test_osparc_config.py +++ b/packages/service-integration/tests/test_osparc_config.py @@ -72,7 +72,8 @@ def test_load_from_labels( @pytest.mark.parametrize( - "example_data", SimcoreServiceSettingLabelEntry.Config.schema_extra["examples"] + "example_data", + SimcoreServiceSettingLabelEntry.model_config["json_schema_extra"]["examples"], ) def test_settings_item_in_sync_with_service_settings_label( example_data: dict[str, Any] From 2fe2b81e041dbfdf61188d11085a88967e457473 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 18 Sep 2024 22:41:27 +0200 Subject: [PATCH 017/280] replace deprecated parse_obj methods --- .../scripts/validate-pg-projects.py | 2 +- .../function_services_catalog/_utils.py | 8 ++--- .../services/demo_units.py | 2 +- .../services/file_picker.py | 2 +- .../services/iter_range.py | 2 +- .../services/iter_sensitivity.py | 2 +- .../services/nodes_group.py | 2 +- .../services/parameters.py | 4 +-- .../services/probes.py | 6 ++-- .../models_library/rest_pagination_utils.py | 2 +- .../src/models_library/rpc_pagination.py | 2 +- .../src/models_library/services_io.py | 4 +-- .../tests/test__models_examples.py | 2 +- .../tests/test_api_schemas_catalog.py | 4 +-- .../test_api_schemas_webserver_projects.py | 2 +- 
packages/models-library/tests/test_docker.py | 4 ++- .../models-library/tests/test_generics.py | 2 +- .../tests/test_project_networks.py | 6 ++-- .../models-library/tests/test_projects.py | 4 +-- .../tests/test_projects_state.py | 4 +-- .../tests/test_rest_pagination_utils.py | 6 ++-- .../tests/test_service_settings_labels.py | 8 ++--- .../models-library/tests/test_services_io.py | 2 +- .../tests/test_user_preferences.py | 4 +-- .../tests/test_utils_common_validators.py | 32 +++++++++---------- .../tests/test_utils_service_io.py | 4 +-- 26 files changed, 62 insertions(+), 60 deletions(-) diff --git a/packages/models-library/scripts/validate-pg-projects.py b/packages/models-library/scripts/validate-pg-projects.py index 978e32cfc6f..08c34925faa 100644 --- a/packages/models-library/scripts/validate-pg-projects.py +++ b/packages/models-library/scripts/validate-pg-projects.py @@ -59,7 +59,7 @@ def validate_csv_exported_pg_project( pid = row.get("uuid", index + 1) try: - model = ProjectFromCsv.parse_obj(row) + model = ProjectFromCsv.model_validate(row) if verbose > 1: typer.secho(f"{pid} OK", fg=typer.colors.GREEN) diff --git a/packages/models-library/src/models_library/function_services_catalog/_utils.py b/packages/models-library/src/models_library/function_services_catalog/_utils.py index 4cd1275b5e0..a58a524d094 100644 --- a/packages/models-library/src/models_library/function_services_catalog/_utils.py +++ b/packages/models-library/src/models_library/function_services_catalog/_utils.py @@ -14,10 +14,10 @@ "email": "unknown@osparc.io", "affiliation": "unknown", } -EN = Author.parse_obj(AUTHORS.get("EN", _DEFAULT)) -OM = Author.parse_obj(AUTHORS.get("OM", _DEFAULT)) -PC = Author.parse_obj(AUTHORS.get("PC", _DEFAULT)) -WVG = Author.parse_obj(AUTHORS.get("WVG", _DEFAULT)) +EN = Author.model_validate(AUTHORS.get("EN", _DEFAULT)) +OM = Author.model_validate(AUTHORS.get("OM", _DEFAULT)) +PC = Author.model_validate(AUTHORS.get("PC", _DEFAULT)) +WVG = Author.model_validate(AUTHORS.get("WVG", _DEFAULT)) def create_fake_thumbnail_url(label: str) -> str: diff --git a/packages/models-library/src/models_library/function_services_catalog/services/demo_units.py b/packages/models-library/src/models_library/function_services_catalog/services/demo_units.py index 298ac02c82b..44bd30e0899 100644 --- a/packages/models-library/src/models_library/function_services_catalog/services/demo_units.py +++ b/packages/models-library/src/models_library/function_services_catalog/services/demo_units.py @@ -15,7 +15,7 @@ # If this assumption cannot be guaranteed anymore the test must be updated. 
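Note: the renames applied in this and the following commits follow pydantic's v1 -> v2 method mapping; a minimal sketch with a hypothetical model:

from pydantic import BaseModel


class _Item(BaseModel):
    name: str


item = _Item.model_validate({"name": "x"})                    # v1: _Item.parse_obj(...)
item_from_json = _Item.model_validate_json('{"name": "x"}')   # v1: _Item.parse_raw(...)
as_dict = item.model_dump()                                   # v1: item.dict()
as_json = item.model_dump_json()                              # v1: item.json()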
# -META = ServiceMetaDataPublished.parse_obj( +META = ServiceMetaDataPublished.model_validate( { "integration-version": LATEST_INTEGRATION_VERSION, "key": f"{FUNCTION_SERVICE_KEY_PREFIX}/data-iterator/demo-units", diff --git a/packages/models-library/src/models_library/function_services_catalog/services/file_picker.py b/packages/models-library/src/models_library/function_services_catalog/services/file_picker.py index 0e0554842fb..2245a8ba3ff 100644 --- a/packages/models-library/src/models_library/function_services_catalog/services/file_picker.py +++ b/packages/models-library/src/models_library/function_services_catalog/services/file_picker.py @@ -8,7 +8,7 @@ from .._key_labels import FUNCTION_SERVICE_KEY_PREFIX from .._utils import OM, FunctionServices -META: Final = ServiceMetaDataPublished.parse_obj( +META: Final = ServiceMetaDataPublished.model_validate( { "integration-version": LATEST_INTEGRATION_VERSION, "key": f"{FUNCTION_SERVICE_KEY_PREFIX}/file-picker", diff --git a/packages/models-library/src/models_library/function_services_catalog/services/iter_range.py b/packages/models-library/src/models_library/function_services_catalog/services/iter_range.py index 662cbf327cf..d59e37735e8 100644 --- a/packages/models-library/src/models_library/function_services_catalog/services/iter_range.py +++ b/packages/models-library/src/models_library/function_services_catalog/services/iter_range.py @@ -12,7 +12,7 @@ def create_metadata( ) -> ServiceMetaDataPublished: prefix = prefix or type_name LABEL = f"{type_name.capitalize()} iterator" - return ServiceMetaDataPublished.parse_obj( + return ServiceMetaDataPublished.model_validate( { "integration-version": LATEST_INTEGRATION_VERSION, "key": f"{FUNCTION_SERVICE_KEY_PREFIX}/data-iterator/{prefix}-range", diff --git a/packages/models-library/src/models_library/function_services_catalog/services/iter_sensitivity.py b/packages/models-library/src/models_library/function_services_catalog/services/iter_sensitivity.py index f0199389885..a2be976c651 100644 --- a/packages/models-library/src/models_library/function_services_catalog/services/iter_sensitivity.py +++ b/packages/models-library/src/models_library/function_services_catalog/services/iter_sensitivity.py @@ -13,7 +13,7 @@ LIST_NUMBERS_SCHEMA: dict[str, Any] = schema_of(list[float], title="list[number]") -META = ServiceMetaDataPublished.parse_obj( +META = ServiceMetaDataPublished.model_validate( { "integration-version": LATEST_INTEGRATION_VERSION, "key": f"{FUNCTION_SERVICE_KEY_PREFIX}/data-iterator/sensitivity", diff --git a/packages/models-library/src/models_library/function_services_catalog/services/nodes_group.py b/packages/models-library/src/models_library/function_services_catalog/services/nodes_group.py index bfde87e52c3..40adb28f342 100644 --- a/packages/models-library/src/models_library/function_services_catalog/services/nodes_group.py +++ b/packages/models-library/src/models_library/function_services_catalog/services/nodes_group.py @@ -7,7 +7,7 @@ # NOTE: DO not mistake with simcore/services/frontend/nodes-group/macros/ # which needs to be redefined. 
# -META = ServiceMetaDataPublished.parse_obj( +META = ServiceMetaDataPublished.model_validate( { "integration-version": LATEST_INTEGRATION_VERSION, "key": f"{FUNCTION_SERVICE_KEY_PREFIX}/nodes-group", diff --git a/packages/models-library/src/models_library/function_services_catalog/services/parameters.py b/packages/models-library/src/models_library/function_services_catalog/services/parameters.py index e0e25b6ee11..d62a4a88dfb 100644 --- a/packages/models-library/src/models_library/function_services_catalog/services/parameters.py +++ b/packages/models-library/src/models_library/function_services_catalog/services/parameters.py @@ -12,7 +12,7 @@ def _create_metadata(type_name: str) -> ServiceMetaDataPublished: This is a parametrized node (or param-node in short) """ - meta = ServiceMetaDataPublished.parse_obj( + meta = ServiceMetaDataPublished.model_validate( { "integration-version": LATEST_INTEGRATION_VERSION, "key": f"{FUNCTION_SERVICE_KEY_PREFIX}/parameter/{type_name}", @@ -45,7 +45,7 @@ def _create_metadata(type_name: str) -> ServiceMetaDataPublished: META_BOOL: Final = _create_metadata(type_name="boolean") META_INT: Final = _create_metadata(type_name="integer") META_STR: Final = _create_metadata(type_name="string") -META_ARRAY: Final = ServiceMetaDataPublished.parse_obj( +META_ARRAY: Final = ServiceMetaDataPublished.model_validate( { "integration-version": LATEST_INTEGRATION_VERSION, "key": f"{FUNCTION_SERVICE_KEY_PREFIX}/parameter/array", diff --git a/packages/models-library/src/models_library/function_services_catalog/services/probes.py b/packages/models-library/src/models_library/function_services_catalog/services/probes.py index e736efb2fb1..4c710a90ade 100644 --- a/packages/models-library/src/models_library/function_services_catalog/services/probes.py +++ b/packages/models-library/src/models_library/function_services_catalog/services/probes.py @@ -7,7 +7,7 @@ def _create_metadata(type_name: str) -> ServiceMetaDataPublished: - obj: ServiceMetaDataPublished = ServiceMetaDataPublished.parse_obj( + obj: ServiceMetaDataPublished = ServiceMetaDataPublished.model_validate( { "integration-version": LATEST_INTEGRATION_VERSION, "key": f"{FUNCTION_SERVICE_KEY_PREFIX}/iterator-consumer/probe/{type_name}", @@ -38,7 +38,7 @@ def _create_metadata(type_name: str) -> ServiceMetaDataPublished: META_BOOL: Final = _create_metadata("boolean") META_INT: Final = _create_metadata("integer") META_STR: Final = _create_metadata("string") -META_ARRAY: Final = ServiceMetaDataPublished.parse_obj( +META_ARRAY: Final = ServiceMetaDataPublished.model_validate( { "integration-version": LATEST_INTEGRATION_VERSION, "key": f"{FUNCTION_SERVICE_KEY_PREFIX}/iterator-consumer/probe/array", @@ -67,7 +67,7 @@ def _create_metadata(type_name: str) -> ServiceMetaDataPublished: } ) -META_FILE: Final = ServiceMetaDataPublished.parse_obj( +META_FILE: Final = ServiceMetaDataPublished.model_validate( { "integration-version": LATEST_INTEGRATION_VERSION, "key": f"{FUNCTION_SERVICE_KEY_PREFIX}/iterator-consumer/probe/file", diff --git a/packages/models-library/src/models_library/rest_pagination_utils.py b/packages/models-library/src/models_library/rest_pagination_utils.py index 8d901d50f30..c303ee8e351 100644 --- a/packages/models-library/src/models_library/rest_pagination_utils.py +++ b/packages/models-library/src/models_library/rest_pagination_utils.py @@ -61,7 +61,7 @@ def paginate_data( Usage: obj: PageDict = paginate_data( ... 
) - model = Page[MyModelItem].parse_obj(obj) + model = Page[MyModelItem].model_validate(obj) raises ValidationError """ diff --git a/packages/models-library/src/models_library/rpc_pagination.py b/packages/models-library/src/models_library/rpc_pagination.py index 0ec454cc9fd..92470b30d67 100644 --- a/packages/models-library/src/models_library/rpc_pagination.py +++ b/packages/models-library/src/models_library/rpc_pagination.py @@ -31,7 +31,7 @@ class PageRefsParams(PageRefs[PageQueryParameters]): @classmethod def create(cls, total: int, limit: int, offset: int) -> "PageRefsParams": last_page = ceil(total / limit) - 1 - return cls.parse_obj( + return cls.model_validate( { "self": {"offset": offset, "limit": limit}, "first": {"offset": 0, "limit": limit}, diff --git a/packages/models-library/src/models_library/services_io.py b/packages/models-library/src/models_library/services_io.py index 49264f19799..6f1acd3d494 100644 --- a/packages/models-library/src/models_library/services_io.py +++ b/packages/models-library/src/models_library/services_io.py @@ -211,7 +211,7 @@ class ServiceInput(BaseServiceIOModel): def from_json_schema(cls, port_schema: dict[str, Any]) -> "ServiceInput": """Creates input port model from a json-schema""" data = cls._from_json_schema_base_implementation(port_schema) - return cls.parse_obj(data) + return cls.model_validate(data) class ServiceOutput(BaseServiceIOModel): @@ -258,4 +258,4 @@ class ServiceOutput(BaseServiceIOModel): def from_json_schema(cls, port_schema: dict[str, Any]) -> "ServiceOutput": """Creates output port model from a json-schema""" data = cls._from_json_schema_base_implementation(port_schema) - return cls.parse_obj(data) + return cls.model_validate(data) diff --git a/packages/models-library/tests/test__models_examples.py b/packages/models-library/tests/test__models_examples.py index 12809db713b..2345b5451f1 100644 --- a/packages/models-library/tests/test__models_examples.py +++ b/packages/models-library/tests/test__models_examples.py @@ -14,6 +14,6 @@ def test_all_models_library_models_config_examples( model_cls: type[BaseModel], example_name: int, example_data: Any ): - assert model_cls.parse_obj( + assert model_cls.model_validate( example_data ), f"Failed {example_name} : {json.dumps(example_data)}" diff --git a/packages/models-library/tests/test_api_schemas_catalog.py b/packages/models-library/tests/test_api_schemas_catalog.py index 0c815d7bd0c..20d107fd2b0 100644 --- a/packages/models-library/tests/test_api_schemas_catalog.py +++ b/packages/models-library/tests/test_api_schemas_catalog.py @@ -9,7 +9,7 @@ def test_service_port_with_file(): - io = ServiceInput.parse_obj( + io = ServiceInput.model_validate( { "displayOrder": 1, "label": "Input files", @@ -39,7 +39,7 @@ def test_service_port_with_file(): def test_service_port_with_boolean(): - io = ServiceInput.parse_obj( + io = ServiceInput.model_validate( { "displayOrder": 3, "label": "Same title and description is more usual than you might think", diff --git a/packages/models-library/tests/test_api_schemas_webserver_projects.py b/packages/models-library/tests/test_api_schemas_webserver_projects.py index b8e4fcbdc47..ea67927fd87 100644 --- a/packages/models-library/tests/test_api_schemas_webserver_projects.py +++ b/packages/models-library/tests/test_api_schemas_webserver_projects.py @@ -34,7 +34,7 @@ ids=lambda c: c.name, ) def test_create_project_schemas(api_call: HttpApiCallCapture): - request_payload = ProjectCreateNew.parse_obj(api_call.request_payload) + request_payload = 
ProjectCreateNew.model_validate(api_call.request_payload) assert request_payload response_body = parse_obj_as( diff --git a/packages/models-library/tests/test_docker.py b/packages/models-library/tests/test_docker.py index 55cb9419bbc..87dc86f504f 100644 --- a/packages/models-library/tests/test_docker.py +++ b/packages/models-library/tests/test_docker.py @@ -107,7 +107,9 @@ def test_docker_generic_tag(image_name: str, valid: bool): ids=str, ) def test_simcore_service_docker_label_keys(obj_data: dict[str, Any]): - simcore_service_docker_label_keys = StandardSimcoreDockerLabels.parse_obj(obj_data) + simcore_service_docker_label_keys = StandardSimcoreDockerLabels.model_validate( + obj_data + ) exported_dict = simcore_service_docker_label_keys.to_simcore_runtime_docker_labels() assert all( isinstance(v, str) for v in exported_dict.values() diff --git a/packages/models-library/tests/test_generics.py b/packages/models-library/tests/test_generics.py index b778cd4a490..5b7e0b4d610 100644 --- a/packages/models-library/tests/test_generics.py +++ b/packages/models-library/tests/test_generics.py @@ -20,7 +20,7 @@ def test_dict_base_model(): "another key": "a string value", "yet another key": Path("some_path"), } - some_instance = DictModel[str, Any].parse_obj(some_dict) + some_instance = DictModel[str, Any].model_validate(some_dict) assert some_instance # test some typical dict methods diff --git a/packages/models-library/tests/test_project_networks.py b/packages/models-library/tests/test_project_networks.py index c91f0503a8e..0874cf5bd71 100644 --- a/packages/models-library/tests/test_project_networks.py +++ b/packages/models-library/tests/test_project_networks.py @@ -19,7 +19,7 @@ ], ) def test_networks_with_aliases_ok(valid_example: dict) -> None: - assert NetworksWithAliases.parse_obj(valid_example) + assert NetworksWithAliases.model_validate(valid_example) @pytest.mark.parametrize( @@ -39,7 +39,7 @@ def test_networks_with_aliases_ok(valid_example: dict) -> None: ) def test_networks_with_aliases_fail(invalid_example: dict) -> None: with pytest.raises(ValidationError): - assert NetworksWithAliases.parse_obj(invalid_example) + assert NetworksWithAliases.model_validate(invalid_example) @pytest.mark.parametrize("network_name", ["a", "ok", "a_", "A_", "a1", "a-"]) @@ -58,7 +58,7 @@ def test_projects_networks_validation_fails(network_name: str) -> None: def test_class_constructors_fail() -> None: with pytest.raises(ValidationError): - NetworksWithAliases.parse_obj( + NetworksWithAliases.model_validate( { "ok-netowrk_naeme": { UUID( diff --git a/packages/models-library/tests/test_projects.py b/packages/models-library/tests/test_projects.py index 8b646345c2d..5cbb0e13573 100644 --- a/packages/models-library/tests/test_projects.py +++ b/packages/models-library/tests/test_projects.py @@ -28,7 +28,7 @@ def minimal_project(faker: Faker) -> dict[str, Any]: def test_project_minimal_model(minimal_project: dict[str, Any]): - project = Project.parse_obj(minimal_project) + project = Project.model_validate(minimal_project) assert project assert project.thumbnail is None @@ -37,7 +37,7 @@ def test_project_minimal_model(minimal_project: dict[str, Any]): def test_project_with_thumbnail_as_empty_string(minimal_project: dict[str, Any]): thumbnail_empty_string = deepcopy(minimal_project) thumbnail_empty_string.update({"thumbnail": ""}) - project = Project.parse_obj(thumbnail_empty_string) + project = Project.model_validate(thumbnail_empty_string) assert project assert project.thumbnail is None diff --git 
a/packages/models-library/tests/test_projects_state.py b/packages/models-library/tests/test_projects_state.py index 2895d71f3a1..236d65a5538 100644 --- a/packages/models-library/tests/test_projects_state.py +++ b/packages/models-library/tests/test_projects_state.py @@ -5,7 +5,7 @@ def test_project_locked_with_missing_owner_raises(): with pytest.raises(ValueError): ProjectLocked(value=True, status=ProjectStatus.OPENED) - ProjectLocked.parse_obj({"value": False, "status": ProjectStatus.OPENED}) + ProjectLocked.model_validate({"value": False, "status": ProjectStatus.OPENED}) @pytest.mark.parametrize( @@ -19,4 +19,4 @@ def test_project_locked_with_missing_owner_raises(): ) def test_project_locked_with_allowed_values(lock: bool, status: ProjectStatus): with pytest.raises(ValueError): - ProjectLocked.parse_obj({"value": lock, "status": status}) + ProjectLocked.model_validate({"value": lock, "status": status}) diff --git a/packages/models-library/tests/test_rest_pagination_utils.py b/packages/models-library/tests/test_rest_pagination_utils.py index f9887a1bf71..1fc466f5f8c 100644 --- a/packages/models-library/tests/test_rest_pagination_utils.py +++ b/packages/models-library/tests/test_rest_pagination_utils.py @@ -41,7 +41,7 @@ def test_paginating_data(base_url): ) assert data_obj - model_instance = Page[int].parse_obj(data_obj) + model_instance = Page[int].model_validate(data_obj) assert model_instance assert model_instance.meta == PageMetaInfoLimitOffset( total=total_number_of_items, count=len(data_chunk), limit=limit, offset=offset @@ -83,7 +83,7 @@ def test_paginating_data(base_url): offset=offset, ) - model_instance = Page[int].parse_obj(data_obj) + model_instance = Page[int].model_validate(data_obj) assert model_instance assert model_instance.meta == PageMetaInfoLimitOffset( total=total_number_of_items, @@ -136,7 +136,7 @@ def test_paginating_data(base_url): ) assert data_obj - model_instance = Page[int].parse_obj(data_obj) + model_instance = Page[int].model_validate(data_obj) assert model_instance assert model_instance.meta == PageMetaInfoLimitOffset( diff --git a/packages/models-library/tests/test_service_settings_labels.py b/packages/models-library/tests/test_service_settings_labels.py index f9c096804cc..d91b6c7071d 100644 --- a/packages/models-library/tests/test_service_settings_labels.py +++ b/packages/models-library/tests/test_service_settings_labels.py @@ -74,7 +74,7 @@ def test_simcore_service_labels(example: dict, items: int, uses_dynamic_sidecar: def test_service_settings(): - simcore_settings_settings_label = SimcoreServiceSettingsLabel.parse_obj( + simcore_settings_settings_label = SimcoreServiceSettingsLabel.model_validate( SimcoreServiceSettingLabelEntry.model_config["json_schema_extra"]["examples"] ) assert simcore_settings_settings_label @@ -520,7 +520,7 @@ def test_can_parse_labels_with_osparc_identifiers( vendor_environments: dict[str, Any], service_labels: dict[str, str] ): # can load OSPARC_VARIABLE_ identifiers!! 
- service_meta = SimcoreServiceLabels.parse_obj(service_labels) + service_meta = SimcoreServiceLabels.model_validate(service_labels) assert service_meta.containers_allowed_outgoing_permit_list nat_rule: NATRule = service_meta.containers_allowed_outgoing_permit_list[ @@ -568,7 +568,7 @@ def test_resolving_some_service_labels_at_load_time( vendor_environments: dict[str, Any], service_labels: dict[str, str] ): print(json.dumps(service_labels, indent=1)) - service_meta = SimcoreServiceLabels.parse_obj(service_labels) + service_meta = SimcoreServiceLabels.model_validate(service_labels) # NOTE: replacing all OsparcVariableIdentifier instances nested inside objects # this also does a partial replacement if there is no entry inside the vendor_environments @@ -593,7 +593,7 @@ def test_resolving_some_service_labels_at_load_time( # NOTE: that this model needs all values to be resolved before parsing them # otherwise it might fail!! The question is whether these values can be resolved at this point # NOTE: vendor values are in the database and therefore are available at this point - labels = SimcoreServiceLabels.parse_obj(service_labels) + labels = SimcoreServiceLabels.model_validate(service_labels) print("After", labels.model_dump_json(indent=1)) formatted_json = service_meta.model_dump_json(indent=1) diff --git a/packages/models-library/tests/test_services_io.py b/packages/models-library/tests/test_services_io.py index acfb02a05b1..e056647665f 100644 --- a/packages/models-library/tests/test_services_io.py +++ b/packages/models-library/tests/test_services_io.py @@ -15,7 +15,7 @@ def test_service_port_units(tests_data_dir: Path): data = yaml.safe_load((tests_data_dir / "metadata-sleeper-2.0.2.yaml").read_text()) print(ServiceMetaDataPublished.schema_json(indent=2)) - service_meta = ServiceMetaDataPublished.parse_obj(data) + service_meta = ServiceMetaDataPublished.model_validate(data) assert service_meta.inputs for input_nameid, input_meta in service_meta.inputs.items(): diff --git a/packages/models-library/tests/test_user_preferences.py b/packages/models-library/tests/test_user_preferences.py index 272e73cf6e5..59ffb5a4c27 100644 --- a/packages/models-library/tests/test_user_preferences.py +++ b/packages/models-library/tests/test_user_preferences.py @@ -96,7 +96,7 @@ def unregister_defined_classes() -> Iterator[None]: def test__frontend__user_preference(value: Any, unregister_defined_classes: None): - pref1 = FrontendUserPreference.parse_obj( + pref1 = FrontendUserPreference.model_validate( {"preference_identifier": "pref_id", "value": value} ) assert isinstance(pref1, FrontendUserPreference) @@ -112,7 +112,7 @@ def test__user_service__user_preference( mock_file_path: Path, unregister_defined_classes: None, ): - pref1 = UserServiceUserPreference.parse_obj( + pref1 = UserServiceUserPreference.model_validate( { "value": value, "service_key": service_key, diff --git a/packages/models-library/tests/test_utils_common_validators.py b/packages/models-library/tests/test_utils_common_validators.py index d4c7cb5409f..db9df708b0f 100644 --- a/packages/models-library/tests/test_utils_common_validators.py +++ b/packages/models-library/tests/test_utils_common_validators.py @@ -48,8 +48,8 @@ class Model(BaseModel): empty_str_to_none_pre_validator ) - model = Model.parse_obj({"nullable_message": None}) - assert model == Model.parse_obj({"nullable_message": ""}) + model = Model.model_validate({"nullable_message": None}) + assert model == Model.model_validate({"nullable_message": ""}) def 
test_none_to_empty_str_pre_validator(): @@ -60,8 +60,8 @@ class Model(BaseModel): none_to_empty_str_pre_validator ) - model = Model.parse_obj({"message": ""}) - assert model == Model.parse_obj({"message": None}) + model = Model.model_validate({"message": ""}) + assert model == Model.model_validate({"message": None}) def test_null_or_none_str_to_none_validator(): @@ -72,20 +72,20 @@ class Model(BaseModel): null_or_none_str_to_none_validator ) - model = Model.parse_obj({"message": "none"}) - assert model == Model.parse_obj({"message": None}) + model = Model.model_validate({"message": "none"}) + assert model == Model.model_validate({"message": None}) - model = Model.parse_obj({"message": "null"}) - assert model == Model.parse_obj({"message": None}) + model = Model.model_validate({"message": "null"}) + assert model == Model.model_validate({"message": None}) - model = Model.parse_obj({"message": "NoNe"}) - assert model == Model.parse_obj({"message": None}) + model = Model.model_validate({"message": "NoNe"}) + assert model == Model.model_validate({"message": None}) - model = Model.parse_obj({"message": "NuLl"}) - assert model == Model.parse_obj({"message": None}) + model = Model.model_validate({"message": "NuLl"}) + assert model == Model.model_validate({"message": None}) - model = Model.parse_obj({"message": None}) - assert model == Model.parse_obj({"message": None}) + model = Model.model_validate({"message": None}) + assert model == Model.model_validate({"message": None}) - model = Model.parse_obj({"message": ""}) - assert model == Model.parse_obj({"message": ""}) + model = Model.model_validate({"message": ""}) + assert model == Model.model_validate({"message": ""}) diff --git a/packages/models-library/tests/test_utils_service_io.py b/packages/models-library/tests/test_utils_service_io.py index 51e4324e2be..15ea8cfcae8 100644 --- a/packages/models-library/tests/test_utils_service_io.py +++ b/packages/models-library/tests/test_utils_service_io.py @@ -32,11 +32,11 @@ def service_port(request: pytest.FixtureRequest) -> ServiceInput | ServiceOutput try: index = example_inputs_labels.index(request.param) example = ServiceInput.model_config["json_schema_extra"]["examples"][index] - return ServiceInput.parse_obj(example) + return ServiceInput.model_validate(example) except ValueError: index = example_outputs_labels.index(request.param) example = ServiceOutput.model_config["json_schema_extra"]["examples"][index] - return ServiceOutput.parse_obj(example) + return ServiceOutput.model_validate(example) def test_get_schema_from_port(service_port: ServiceInput | ServiceOutput): From f32088363dae349992441316ecc942eb6efbc69b Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 18 Sep 2024 23:22:42 +0200 Subject: [PATCH 018/280] replace deprecated dict methods --- packages/models-library/src/models_library/docker.py | 6 ++++-- packages/models-library/src/models_library/utils/nodes.py | 2 +- packages/models-library/tests/test__pydantic_models.py | 6 +++--- packages/models-library/tests/test_api_schemas_catalog.py | 4 ++-- packages/models-library/tests/test_generics.py | 8 +++++--- packages/models-library/tests/test_project_nodes.py | 4 ++-- .../models-library/tests/test_service_settings_labels.py | 2 +- packages/models-library/tests/test_sidecar_volumes.py | 2 +- .../tests/test_utils_pydantic_tools_extension.py | 4 ++-- 9 files changed, 21 insertions(+), 17 deletions(-) diff --git a/packages/models-library/src/models_library/docker.py b/packages/models-library/src/models_library/docker.py index 
75c003fff89..a75f2110546 100644 --- a/packages/models-library/src/models_library/docker.py +++ b/packages/models-library/src/models_library/docker.py @@ -33,7 +33,9 @@ def from_key(cls, key: str) -> "DockerLabelKey": # NOTE: https://docs.docker.com/engine/reference/commandline/tag/#description -DockerGenericTag: TypeAlias = Annotated[str, StringConstraints(pattern=DOCKER_GENERIC_TAG_KEY_RE)] +DockerGenericTag: TypeAlias = Annotated[ + str, StringConstraints(pattern=DOCKER_GENERIC_TAG_KEY_RE) +] class DockerPlacementConstraint(ConstrainedStr): @@ -139,7 +141,7 @@ def to_simcore_runtime_docker_labels(self) -> dict[DockerLabelKey, str]: """returns a dictionary of strings as required by docker""" return { to_simcore_runtime_docker_label_key(k): f"{v}" - for k, v in sorted(self.dict().items()) + for k, v in sorted(self.model_dump().items()) } @classmethod diff --git a/packages/models-library/src/models_library/utils/nodes.py b/packages/models-library/src/models_library/utils/nodes.py index 1def98ec507..7f57bae6e89 100644 --- a/packages/models-library/src/models_library/utils/nodes.py +++ b/packages/models-library/src/models_library/utils/nodes.py @@ -58,7 +58,7 @@ async def compute_node_hash( # ensure we do not get pydantic types for hashing here, only jsoneable stuff if isinstance(payload, BaseModel): - payload = payload.dict(by_alias=True, exclude_unset=True) + payload = payload.model_dump(by_alias=True, exclude_unset=True) # remove the payload if it is null and it was resolved if payload is not None: diff --git a/packages/models-library/tests/test__pydantic_models.py b/packages/models-library/tests/test__pydantic_models.py index 548d34f6569..645dc1ffe21 100644 --- a/packages/models-library/tests/test__pydantic_models.py +++ b/packages/models-library/tests/test__pydantic_models.py @@ -50,7 +50,7 @@ class ArgumentAnnotation(BaseModel): "items": {"type": "integer"}, } - assert x_annotation.dict() == { + assert x_annotation.model_dump() == { "name": "x", "data_schema": { "title": "schema[x]", @@ -64,7 +64,7 @@ class ArgumentAnnotation(BaseModel): # # the constructor would expect a raw string but we produced a nested dict with pytest.raises(ValidationError) as exc_info: - ArgumentAnnotation(**x_annotation.dict()) + ArgumentAnnotation(**x_annotation.model_dump()) assert exc_info.value.errors()[0] == { "input": {"items": {"type": "integer"}, "title": "schema[x]", "type": "array"}, @@ -147,7 +147,7 @@ class Func(BaseModel): assert model.input == {"w": 42, "z": False} assert model.output == "some/path/or/string" - # (undefined) json string vs SimCoreFileLink.dict() ------------ + # (undefined) json string vs SimCoreFileLink.model_dump() ------------ MINIMAL = 2 # <--- index of the example with the minimum required fields assert SimCoreFileLink in get_args(OutputTypes) example = SimCoreFileLink.model_validate( diff --git a/packages/models-library/tests/test_api_schemas_catalog.py b/packages/models-library/tests/test_api_schemas_catalog.py index 20d107fd2b0..721f27481e2 100644 --- a/packages/models-library/tests/test_api_schemas_catalog.py +++ b/packages/models-library/tests/test_api_schemas_catalog.py @@ -21,7 +21,7 @@ def test_service_port_with_file(): } ) - port = ServicePortGet.from_service_io("input", "input_1", io).dict( + port = ServicePortGet.from_service_io("input", "input_1", io).model_dump( exclude_unset=True ) @@ -49,7 +49,7 @@ def test_service_port_with_boolean(): } ) - port = ServicePortGet.from_service_io("input", "input_1", io).dict( + port = ServicePortGet.from_service_io("input", 
"input_1", io).model_dump( exclude_unset=True ) diff --git a/packages/models-library/tests/test_generics.py b/packages/models-library/tests/test_generics.py index 5b7e0b4d610..f94436f1214 100644 --- a/packages/models-library/tests/test_generics.py +++ b/packages/models-library/tests/test_generics.py @@ -78,10 +78,10 @@ def test_enveloped_data_builtin(builtin_type: type, builtin_value: Any): assert envelope == Envelope[builtin_type].from_data(builtin_value) # exports - assert envelope.dict(exclude_unset=True, exclude_none=True) == { + assert envelope.model_dump(exclude_unset=True, exclude_none=True) == { "data": builtin_value } - assert envelope.dict() == {"data": builtin_value, "error": None} + assert envelope.model_dump() == {"data": builtin_value, "error": None} def test_enveloped_data_model(): @@ -92,7 +92,9 @@ class User(BaseModel): enveloped = Envelope[User](data={"idr": 3}) assert isinstance(enveloped.data, User) - assert enveloped.dict(exclude_unset=True, exclude_none=True) == {"data": {"idr": 3}} + assert enveloped.model_dump(exclude_unset=True, exclude_none=True) == { + "data": {"idr": 3} + } def test_enveloped_data_dict(): diff --git a/packages/models-library/tests/test_project_nodes.py b/packages/models-library/tests/test_project_nodes.py index 2edefd1533d..96f427a19cb 100644 --- a/packages/models-library/tests/test_project_nodes.py +++ b/packages/models-library/tests/test_project_nodes.py @@ -31,7 +31,7 @@ def test_create_minimal_node(minimal_node_data_sample: dict[str, Any]): assert node.parent is None assert node.progress is None - assert node.dict(exclude_unset=True) == minimal_node_data_sample + assert node.model_dump(exclude_unset=True) == minimal_node_data_sample def test_create_minimal_node_with_new_data_type( @@ -69,4 +69,4 @@ def test_backwards_compatibility_node_data(minimal_node_data_sample: dict[str, A assert node.state.modified is True assert node.state.dependencies == set() - assert node.dict(exclude_unset=True) != old_node_data + assert node.model_dump(exclude_unset=True) != old_node_data diff --git a/packages/models-library/tests/test_service_settings_labels.py b/packages/models-library/tests/test_service_settings_labels.py index d91b6c7071d..bd181e27fe3 100644 --- a/packages/models-library/tests/test_service_settings_labels.py +++ b/packages/models-library/tests/test_service_settings_labels.py @@ -69,7 +69,7 @@ def test_simcore_service_labels(example: dict, items: int, uses_dynamic_sidecar: simcore_service_labels = SimcoreServiceLabels.model_validate(example) assert simcore_service_labels - assert len(simcore_service_labels.dict(exclude_unset=True)) == items + assert len(simcore_service_labels.model_dump(exclude_unset=True)) == items assert simcore_service_labels.needs_dynamic_sidecar == uses_dynamic_sidecar diff --git a/packages/models-library/tests/test_sidecar_volumes.py b/packages/models-library/tests/test_sidecar_volumes.py index e9c54554288..402899726bc 100644 --- a/packages/models-library/tests/test_sidecar_volumes.py +++ b/packages/models-library/tests/test_sidecar_volumes.py @@ -14,4 +14,4 @@ def test_volume_state_equality_does_not_use_last_changed(status: VolumeStatus): # at the moment of the creation of the object. 
assert VolumeState(status=status) == VolumeState(status=status) schema_property_count = len(VolumeState.schema()["properties"]) - assert len(VolumeState(status=status).dict()) == schema_property_count + assert len(VolumeState(status=status).model_dump()) == schema_property_count diff --git a/packages/models-library/tests/test_utils_pydantic_tools_extension.py b/packages/models-library/tests/test_utils_pydantic_tools_extension.py index 34cbb528dbb..0bf8abdbb41 100644 --- a/packages/models-library/tests/test_utils_pydantic_tools_extension.py +++ b/packages/models-library/tests/test_utils_pydantic_tools_extension.py @@ -39,8 +39,8 @@ def test_schema(): def test_only_required(): model = MyModel(a=1, b=2) - assert model.dict() == {"a": 1, "b": 2, "c": 42, "d": None, "e": None} - assert model.dict(exclude_unset=True) == {"a": 1, "b": 2} + assert model.model_dump() == {"a": 1, "b": 2, "c": 42, "d": None, "e": None} + assert model.model_dump(exclude_unset=True) == {"a": 1, "b": 2} def test_parse_obj_or_none(): From 2b6180c2ce103680ba06f5444b145bdee0a9596c Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 19 Sep 2024 07:51:00 +0200 Subject: [PATCH 019/280] replace deprecated json methods --- packages/models-library/scripts/validate-pg-projects.py | 2 +- .../models-library/src/models_library/rabbitmq_messages.py | 2 +- packages/models-library/tests/test_service_settings_labels.py | 4 ++-- .../models-library/tests/test_service_settings_nat_rule.py | 2 +- packages/models-library/tests/test_user_preferences.py | 2 +- 5 files changed, 6 insertions(+), 6 deletions(-) diff --git a/packages/models-library/scripts/validate-pg-projects.py b/packages/models-library/scripts/validate-pg-projects.py index 08c34925faa..648b6846876 100644 --- a/packages/models-library/scripts/validate-pg-projects.py +++ b/packages/models-library/scripts/validate-pg-projects.py @@ -64,7 +64,7 @@ def validate_csv_exported_pg_project( if verbose > 1: typer.secho(f"{pid} OK", fg=typer.colors.GREEN) if verbose > 2: - typer.echo(model.json(indent=2)) + typer.echo(model.model_dump_json(indent=2)) except ValidationError as err: failed.append(pid) typer.secho( diff --git a/packages/models-library/src/models_library/rabbitmq_messages.py b/packages/models-library/src/models_library/rabbitmq_messages.py index 13ecda316ae..11e0ad55796 100644 --- a/packages/models-library/src/models_library/rabbitmq_messages.py +++ b/packages/models-library/src/models_library/rabbitmq_messages.py @@ -46,7 +46,7 @@ def routing_key(self) -> str | None: """ def body(self) -> bytes: - return self.json().encode() + return self.model_dump_json().encode() class ProjectMessageBase(BaseModel): diff --git a/packages/models-library/tests/test_service_settings_labels.py b/packages/models-library/tests/test_service_settings_labels.py index bd181e27fe3..4e0a26553e2 100644 --- a/packages/models-library/tests/test_service_settings_labels.py +++ b/packages/models-library/tests/test_service_settings_labels.py @@ -543,7 +543,7 @@ def test_can_parse_labels_with_osparc_identifiers( ] service_meta = replace_osparc_variable_identifier(service_meta, vendor_environments) - service_meta_str = service_meta.json() + service_meta_str = service_meta.model_dump_json() not_replaced_vars = {"OSPARC_VARIABLE_OS_TYPE_LINUX"} @@ -552,7 +552,7 @@ def test_can_parse_labels_with_osparc_identifiers( continue assert osparc_variable_name not in service_meta_str - service_meta_str = service_meta.json( + service_meta_str = service_meta.model_dump_json( 
include={"containers_allowed_outgoing_permit_list"} ) diff --git a/packages/models-library/tests/test_service_settings_nat_rule.py b/packages/models-library/tests/test_service_settings_nat_rule.py index 66319e9435c..456b2a3403c 100644 --- a/packages/models-library/tests/test_service_settings_nat_rule.py +++ b/packages/models-library/tests/test_service_settings_nat_rule.py @@ -87,7 +87,7 @@ def test_nat_rule_with_osparc_variable_identifier( # NOTE: values are mostly replaced in place unless it's used as first level replace_osparc_variable_identifier(nat_rule, osparc_variables) - nat_rule_str = nat_rule.json() + nat_rule_str = nat_rule.model_dump_json() for osparc_variable_name in osparc_variables: assert osparc_variable_name not in nat_rule_str diff --git a/packages/models-library/tests/test_user_preferences.py b/packages/models-library/tests/test_user_preferences.py index 59ffb5a4c27..06e4af9b149 100644 --- a/packages/models-library/tests/test_user_preferences.py +++ b/packages/models-library/tests/test_user_preferences.py @@ -123,7 +123,7 @@ def test__user_service__user_preference( # NOTE: these will be stored as bytes, # check bytes serialization/deserialization - pref1_as_bytes = pref1.json().encode() + pref1_as_bytes = pref1.model_dump_json().encode() new_instance = UserServiceUserPreference.parse_raw(pref1_as_bytes) assert new_instance == pref1 From c88b393662ba721b465adcbd0d03d0681c288db4 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 19 Sep 2024 09:43:43 +0200 Subject: [PATCH 020/280] replace deprecated parse_obj_as methods --- .../api_schemas_catalog/__init__.py | 6 ++-- .../api_schemas_clusters_keeper/__init__.py | 8 ++--- .../api_schemas_dynamic_scheduler/__init__.py | 8 ++--- .../api_schemas_efs_guardian/__init__.py | 8 ++--- .../api_schemas_payments/__init__.py | 6 ++-- .../__init__.py | 8 ++--- .../api_schemas_webserver/__init__.py | 6 ++-- .../models_library/rest_pagination_utils.py | 18 +++++------ .../models_library/service_settings_labels.py | 4 +-- .../service_settings_nat_rule.py | 4 +-- .../tests/test__pydantic_models_and_enums.py | 24 ++++++++------- .../test_api_schemas_webserver_projects.py | 24 ++++++++++----- .../tests/test_callbacks_mapping.py | 8 +++-- packages/models-library/tests/test_docker.py | 16 +++++----- .../tests/test_osparc_variable_identifier.py | 30 ++++++++++++------- .../tests/test_project_networks.py | 10 +++---- .../tests/test_project_nodes_io.py | 14 +++++---- .../tests/test_service_settings_labels.py | 11 +++---- .../tests/test_service_settings_nat_rule.py | 10 ++++--- .../tests/test_user_preferences.py | 24 ++++++++------- .../tests/test_utils_service_io.py | 10 +++++-- .../tests/test_utils_specs_substitution.py | 4 +-- 22 files changed, 148 insertions(+), 113 deletions(-) diff --git a/packages/models-library/src/models_library/api_schemas_catalog/__init__.py b/packages/models-library/src/models_library/api_schemas_catalog/__init__.py index 84d761729a4..2e8c8f75a24 100644 --- a/packages/models-library/src/models_library/api_schemas_catalog/__init__.py +++ b/packages/models-library/src/models_library/api_schemas_catalog/__init__.py @@ -1,7 +1,9 @@ from typing import Final -from pydantic import parse_obj_as +from pydantic import TypeAdapter from ..rabbitmq_basic_types import RPCNamespace -CATALOG_RPC_NAMESPACE: Final[RPCNamespace] = parse_obj_as(RPCNamespace, "catalog") +CATALOG_RPC_NAMESPACE: Final[RPCNamespace] = TypeAdapter(RPCNamespace).validate_python( + "catalog" +) diff --git 
a/packages/models-library/src/models_library/api_schemas_clusters_keeper/__init__.py b/packages/models-library/src/models_library/api_schemas_clusters_keeper/__init__.py index b6570d01c89..79be28f2021 100644 --- a/packages/models-library/src/models_library/api_schemas_clusters_keeper/__init__.py +++ b/packages/models-library/src/models_library/api_schemas_clusters_keeper/__init__.py @@ -1,9 +1,9 @@ from typing import Final -from pydantic import parse_obj_as +from pydantic import TypeAdapter from ..rabbitmq_basic_types import RPCNamespace -CLUSTERS_KEEPER_RPC_NAMESPACE: Final[RPCNamespace] = parse_obj_as( - RPCNamespace, "clusters-keeper" -) +CLUSTERS_KEEPER_RPC_NAMESPACE: Final[RPCNamespace] = TypeAdapter( + RPCNamespace +).validate_python("clusters-keeper") diff --git a/packages/models-library/src/models_library/api_schemas_dynamic_scheduler/__init__.py b/packages/models-library/src/models_library/api_schemas_dynamic_scheduler/__init__.py index 5631d38e5f9..70a4f1247ba 100644 --- a/packages/models-library/src/models_library/api_schemas_dynamic_scheduler/__init__.py +++ b/packages/models-library/src/models_library/api_schemas_dynamic_scheduler/__init__.py @@ -1,9 +1,9 @@ from typing import Final -from pydantic import parse_obj_as +from pydantic import TypeAdapter from ..rabbitmq_basic_types import RPCNamespace -DYNAMIC_SCHEDULER_RPC_NAMESPACE: Final[RPCNamespace] = parse_obj_as( - RPCNamespace, "dynamic-scheduler" -) +DYNAMIC_SCHEDULER_RPC_NAMESPACE: Final[RPCNamespace] = TypeAdapter( + RPCNamespace +).validate_python("dynamic-scheduler") diff --git a/packages/models-library/src/models_library/api_schemas_efs_guardian/__init__.py b/packages/models-library/src/models_library/api_schemas_efs_guardian/__init__.py index 50793febaf9..f47a9a3f8d3 100644 --- a/packages/models-library/src/models_library/api_schemas_efs_guardian/__init__.py +++ b/packages/models-library/src/models_library/api_schemas_efs_guardian/__init__.py @@ -1,9 +1,9 @@ from typing import Final -from pydantic import parse_obj_as +from pydantic import TypeAdapter from ..rabbitmq_basic_types import RPCNamespace -EFS_GUARDIAN_RPC_NAMESPACE: Final[RPCNamespace] = parse_obj_as( - RPCNamespace, "efs-guardian" -) +EFS_GUARDIAN_RPC_NAMESPACE: Final[RPCNamespace] = TypeAdapter( + RPCNamespace +).validate_python("efs-guardian") diff --git a/packages/models-library/src/models_library/api_schemas_payments/__init__.py b/packages/models-library/src/models_library/api_schemas_payments/__init__.py index 30d68367ded..73928d6ccd7 100644 --- a/packages/models-library/src/models_library/api_schemas_payments/__init__.py +++ b/packages/models-library/src/models_library/api_schemas_payments/__init__.py @@ -1,7 +1,9 @@ from typing import Final -from pydantic import parse_obj_as +from pydantic import TypeAdapter from ..rabbitmq_basic_types import RPCNamespace -PAYMENTS_RPC_NAMESPACE: Final[RPCNamespace] = parse_obj_as(RPCNamespace, "payments") +PAYMENTS_RPC_NAMESPACE: Final[RPCNamespace] = TypeAdapter(RPCNamespace).validate_python( + "payments" +) diff --git a/packages/models-library/src/models_library/api_schemas_resource_usage_tracker/__init__.py b/packages/models-library/src/models_library/api_schemas_resource_usage_tracker/__init__.py index 295897e5b1d..d32b474edf6 100644 --- a/packages/models-library/src/models_library/api_schemas_resource_usage_tracker/__init__.py +++ b/packages/models-library/src/models_library/api_schemas_resource_usage_tracker/__init__.py @@ -1,9 +1,9 @@ from typing import Final -from pydantic import parse_obj_as +from 
pydantic import TypeAdapter from ..rabbitmq_basic_types import RPCNamespace -RESOURCE_USAGE_TRACKER_RPC_NAMESPACE: Final[RPCNamespace] = parse_obj_as( - RPCNamespace, "resource-usage-tracker" -) +RESOURCE_USAGE_TRACKER_RPC_NAMESPACE: Final[RPCNamespace] = TypeAdapter( + RPCNamespace +).validate_python("resource-usage-tracker") diff --git a/packages/models-library/src/models_library/api_schemas_webserver/__init__.py b/packages/models-library/src/models_library/api_schemas_webserver/__init__.py index f30e0f0790d..c95f68ab78c 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/__init__.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/__init__.py @@ -1,7 +1,9 @@ from typing import Final -from pydantic import parse_obj_as +from pydantic import TypeAdapter from ..rabbitmq_basic_types import RPCNamespace -WEBSERVER_RPC_NAMESPACE: Final[RPCNamespace] = parse_obj_as(RPCNamespace, "webserver") +WEBSERVER_RPC_NAMESPACE: Final[RPCNamespace] = TypeAdapter( + RPCNamespace +).validate_python("webserver") diff --git a/packages/models-library/src/models_library/rest_pagination_utils.py b/packages/models-library/src/models_library/rest_pagination_utils.py index c303ee8e351..ff56a329f80 100644 --- a/packages/models-library/src/models_library/rest_pagination_utils.py +++ b/packages/models-library/src/models_library/rest_pagination_utils.py @@ -1,7 +1,7 @@ from math import ceil from typing import Any, Protocol, TypedDict, Union, runtime_checkable -from pydantic import parse_obj_as +from pydantic import TypeAdapter from .basic_types import AnyHttpUrl from .rest_pagination import PageLinks, PageMetaInfoLimitOffset @@ -73,24 +73,21 @@ def paginate_data( ), _links=PageLinks( self=( - parse_obj_as( - AnyHttpUrl, + TypeAdapter(AnyHttpUrl).validate_python( _replace_query(request_url, {"offset": offset, "limit": limit}), ) ), - first=parse_obj_as( - AnyHttpUrl, _replace_query(request_url, {"offset": 0, "limit": limit}) + first=TypeAdapter(AnyHttpUrl).validate_python( + _replace_query(request_url, {"offset": 0, "limit": limit}) ), - prev=parse_obj_as( - AnyHttpUrl, + prev=TypeAdapter(AnyHttpUrl).validate_python( _replace_query( request_url, {"offset": max(offset - limit, 0), "limit": limit} ), ) if offset > 0 else None, - next=parse_obj_as( - AnyHttpUrl, + next=TypeAdapter(AnyHttpUrl).validate_python( _replace_query( request_url, {"offset": min(offset + limit, last_page * limit), "limit": limit}, @@ -98,8 +95,7 @@ def paginate_data( ) if offset < (last_page * limit) else None, - last=parse_obj_as( - AnyHttpUrl, + last=TypeAdapter(AnyHttpUrl).validate_python( _replace_query( request_url, {"offset": last_page * limit, "limit": limit} ), diff --git a/packages/models-library/src/models_library/service_settings_labels.py b/packages/models-library/src/models_library/service_settings_labels.py index 8f954737231..abc308dcfb4 100644 --- a/packages/models-library/src/models_library/service_settings_labels.py +++ b/packages/models-library/src/models_library/service_settings_labels.py @@ -12,11 +12,11 @@ Field, Json, PrivateAttr, + TypeAdapter, ValidationError, ValidationInfo, field_validator, model_validator, - parse_obj_as, ) from .callbacks_mapping import CallbacksMapping @@ -203,7 +203,7 @@ def validate_volume_limits(cls, v, info: ValidationInfo) -> str | None: for path_str, size_str in v.items(): # checks that format is correct try: - parse_obj_as(ByteSize, size_str) + TypeAdapter(ByteSize).validate_python(size_str) except ValidationError as e: msg = f"Provided 
size='{size_str}' contains invalid charactes: {e!s}" raise ValueError(msg) from e diff --git a/packages/models-library/src/models_library/service_settings_nat_rule.py b/packages/models-library/src/models_library/service_settings_nat_rule.py index ee937c81254..9864fbfae88 100644 --- a/packages/models-library/src/models_library/service_settings_nat_rule.py +++ b/packages/models-library/src/models_library/service_settings_nat_rule.py @@ -5,9 +5,9 @@ BaseModel, ConfigDict, Field, + TypeAdapter, ValidationInfo, field_validator, - parse_obj_as, ) from .basic_types import PortInt @@ -15,7 +15,7 @@ # Cloudflare DNS server address DEFAULT_DNS_SERVER_ADDRESS: Final[str] = "1.1.1.1" # NOSONAR -DEFAULT_DNS_SERVER_PORT: Final[PortInt] = parse_obj_as(PortInt, 53) +DEFAULT_DNS_SERVER_PORT: Final[PortInt] = TypeAdapter(PortInt).validate_python(53) class _PortRange(BaseModel): diff --git a/packages/models-library/tests/test__pydantic_models_and_enums.py b/packages/models-library/tests/test__pydantic_models_and_enums.py index 51f0226ee80..00c67c32c9b 100644 --- a/packages/models-library/tests/test__pydantic_models_and_enums.py +++ b/packages/models-library/tests/test__pydantic_models_and_enums.py @@ -2,7 +2,7 @@ import pytest from models_library.utils.enums import are_equivalent_enums, enum_to_dict -from pydantic import BaseModel, ValidationError, parse_obj_as +from pydantic import BaseModel, TypeAdapter, ValidationError # @@ -76,16 +76,16 @@ class Model(BaseModel): def test_parsing_enums_in_pydantic(): - model = parse_obj_as(Model, {"color": Color1.RED}) + model = TypeAdapter(Model).validate_python({"color": Color1.RED}) assert model.color == Color1.RED # Can parse from STRING - model = parse_obj_as(Model, {"color": "RED"}) + model = TypeAdapter(Model).validate_python({"color": "RED"}) assert model.color == Color1.RED # Can **NOT** parse from equilalent enum with pytest.raises(ValidationError): - parse_obj_as(Model, {"color": Color2.RED}) + TypeAdapter(Model).validate_python({"color": Color2.RED}) class ModelStrAndEnum(BaseModel): @@ -95,30 +95,32 @@ class ModelStrAndEnum(BaseModel): def test_parsing_strenum_in_pydantic(): assert are_equivalent_enums(Color1, ColorStrAndEnum1) - model = parse_obj_as(ModelStrAndEnum, {"color": ColorStrAndEnum1.RED}) + model = TypeAdapter(ModelStrAndEnum).validate_python( + {"color": ColorStrAndEnum1.RED} + ) assert model.color == ColorStrAndEnum1.RED # Can parse from string - model = parse_obj_as(ModelStrAndEnum, {"color": "RED"}) + model = TypeAdapter(ModelStrAndEnum).validate_python({"color": "RED"}) assert model.color == ColorStrAndEnum1.RED # **CAN** parse other equivalent str-enum # Using str-enums allow you to parse from equivalent enums! - parse_obj_as(ModelStrAndEnum, {"color": ColorStrAndEnum2.RED}) + TypeAdapter(ModelStrAndEnum).validate_python({"color": ColorStrAndEnum2.RED}) def test_parsing_str_and_enum_in_pydantic(): - # Can still NOT parse equilalent enum(-only) + # Can still NOT parse equivalent enum(-only) # with pytest.raises(ValidationError): - # parse_obj_as(ModelStrAndEnum, {"color": Color1.RED}) + # TypeAdapter(ModelStrAndEnum).validate_python({"color": Color1.RED}) # And the opposite? NO!!! 
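Note: each parse_obj_as(T, value) call becomes TypeAdapter(T).validate_python(value) in pydantic v2; a minimal sketch with a hypothetical adapter (built once at module level and reused, which avoids rebuilding the validator on every call):

from pydantic import TypeAdapter

_PORTS_ADAPTER = TypeAdapter(list[int])  # build once, reuse

ports = _PORTS_ADAPTER.validate_python(["8080", 443])  # -> [8080, 443]
ports_json = _PORTS_ADAPTER.dump_json(ports)            # -> b'[8080,443]'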
with pytest.raises(ValidationError): - parse_obj_as(Color1, {"color": ColorStrAndEnum1.RED}) + TypeAdapter(Color1).validate_python({"color": ColorStrAndEnum1.RED}) with pytest.raises(ValidationError): - parse_obj_as(Color1, {"color": ColorStrAndEnum2.RED}) + TypeAdapter(Color1).validate_python({"color": ColorStrAndEnum2.RED}) # CONCLUSION: we need a validator to pre-process inputs ! # SEE models_library.utils.common_validators diff --git a/packages/models-library/tests/test_api_schemas_webserver_projects.py b/packages/models-library/tests/test_api_schemas_webserver_projects.py index ea67927fd87..295e9ee2304 100644 --- a/packages/models-library/tests/test_api_schemas_webserver_projects.py +++ b/packages/models-library/tests/test_api_schemas_webserver_projects.py @@ -14,7 +14,7 @@ ) from models_library.generics import Envelope from models_library.rest_pagination import Page -from pydantic import parse_obj_as +from pydantic import TypeAdapter from pytest_simcore.simcore_webserver_projects_rest_api import ( CREATE_FROM_SERVICE, CREATE_FROM_TEMPLATE, @@ -37,9 +37,9 @@ def test_create_project_schemas(api_call: HttpApiCallCapture): request_payload = ProjectCreateNew.model_validate(api_call.request_payload) assert request_payload - response_body = parse_obj_as( - Envelope[ProjectGet] | Envelope[TaskProjectGet], api_call.response_body - ) + response_body = TypeAdapter( + Envelope[ProjectGet] | Envelope[TaskProjectGet] + ).validate_python(api_call.response_body) assert response_body @@ -51,7 +51,9 @@ def test_create_project_schemas(api_call: HttpApiCallCapture): def test_list_project_schemas(api_call: HttpApiCallCapture): assert api_call.request_payload is None - response_body = parse_obj_as(Page[ProjectListItem], api_call.response_body) + response_body = TypeAdapter(Page[ProjectListItem]).validate_python( + api_call.response_body + ) assert response_body @@ -64,7 +66,9 @@ def test_get_project_schemas(api_call: HttpApiCallCapture): # NOTE: that response_body here is the exported values # and therefore ProjectGet has to be implemented in such a way that # can also parse exported values! (e.g. 
Json does not allow that, or ocassionaly exclude_none) - response_body = parse_obj_as(Envelope[ProjectGet], api_call.response_body) + response_body = TypeAdapter(Envelope[ProjectGet]).validate_python( + api_call.response_body + ) assert response_body @@ -74,8 +78,12 @@ def test_get_project_schemas(api_call: HttpApiCallCapture): ids=lambda c: c.name, ) def test_replace_project_schemas(api_call: HttpApiCallCapture): - request_payload = parse_obj_as(ProjectReplace, api_call.request_payload) + request_payload = TypeAdapter(ProjectReplace).validate_python( + api_call.request_payload + ) assert request_payload - response_body = parse_obj_as(Envelope[ProjectGet], api_call.response_body) + response_body = TypeAdapter(Envelope[ProjectGet]).validate_python( + api_call.response_body + ) assert response_body diff --git a/packages/models-library/tests/test_callbacks_mapping.py b/packages/models-library/tests/test_callbacks_mapping.py index e1c0df003c6..e39db6367ad 100644 --- a/packages/models-library/tests/test_callbacks_mapping.py +++ b/packages/models-library/tests/test_callbacks_mapping.py @@ -6,7 +6,7 @@ TIMEOUT_MIN, CallbacksMapping, ) -from pydantic import ValidationError, parse_obj_as +from pydantic import TypeAdapter, ValidationError def _format_with_timeout(timeout: float) -> dict[str, Any]: @@ -20,8 +20,10 @@ def test_inactivity_time_out_is_max_capped(): INACTIVITY_TIMEOUT_CAP - 1, INACTIVITY_TIMEOUT_CAP, ]: - parse_obj_as(CallbacksMapping, _format_with_timeout(in_bounds)) + TypeAdapter(CallbacksMapping).validate_python(_format_with_timeout(in_bounds)) for out_of_bounds in [INACTIVITY_TIMEOUT_CAP + 1, TIMEOUT_MIN - 1]: with pytest.raises(ValidationError): - parse_obj_as(CallbacksMapping, _format_with_timeout(out_of_bounds)) + TypeAdapter(CallbacksMapping).validate_python( + _format_with_timeout(out_of_bounds) + ) diff --git a/packages/models-library/tests/test_docker.py b/packages/models-library/tests/test_docker.py index 87dc86f504f..dd5fed89951 100644 --- a/packages/models-library/tests/test_docker.py +++ b/packages/models-library/tests/test_docker.py @@ -13,7 +13,7 @@ DockerLabelKey, StandardSimcoreDockerLabels, ) -from pydantic import ValidationError, parse_obj_as +from pydantic import TypeAdapter, ValidationError _faker = Faker() @@ -40,11 +40,11 @@ def test_docker_label_key(label_key: str, valid: bool): # NOTE: https://docs.docker.com/config/labels-custom-metadata/#key-format-recommendations if valid: - instance = parse_obj_as(DockerLabelKey, label_key) + instance = TypeAdapter(DockerLabelKey).validate_python(label_key) assert instance else: with pytest.raises(ValidationError): - parse_obj_as(DockerLabelKey, label_key) + TypeAdapter(DockerLabelKey).validate_python(label_key) @pytest.mark.parametrize( @@ -94,11 +94,11 @@ def test_docker_label_key(label_key: str, valid: bool): ) def test_docker_generic_tag(image_name: str, valid: bool): if valid: - instance = parse_obj_as(DockerGenericTag, image_name) + instance = TypeAdapter(DockerGenericTag).validate_python(image_name) assert instance else: with pytest.raises(ValidationError): - parse_obj_as(DockerGenericTag, image_name) + TypeAdapter(DockerGenericTag).validate_python(image_name) @pytest.mark.parametrize( @@ -117,8 +117,8 @@ def test_simcore_service_docker_label_keys(obj_data: dict[str, Any]): assert all( key.startswith(_SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX) for key in exported_dict ) - re_imported_docker_label_keys = parse_obj_as( - StandardSimcoreDockerLabels, exported_dict - ) + re_imported_docker_label_keys = TypeAdapter( + 
StandardSimcoreDockerLabels + ).validate_python(exported_dict) assert re_imported_docker_label_keys assert simcore_service_docker_label_keys == re_imported_docker_label_keys diff --git a/packages/models-library/tests/test_osparc_variable_identifier.py b/packages/models-library/tests/test_osparc_variable_identifier.py index 18b48c299bd..af32dd7c42c 100644 --- a/packages/models-library/tests/test_osparc_variable_identifier.py +++ b/packages/models-library/tests/test_osparc_variable_identifier.py @@ -10,7 +10,7 @@ raise_if_unresolved_osparc_variable_identifier_found, replace_osparc_variable_identifier, ) -from pydantic import BaseModel, ValidationError, parse_obj_as +from pydantic import BaseModel, TypeAdapter, ValidationError VALID_IDENTIFIERS: list[str] = [ "$OSPARC_VARIABLE_One121_", @@ -50,13 +50,15 @@ def osparc_variable_identifier_str(request: pytest.FixtureRequest) -> str: def identifier( osparc_variable_identifier_str: str, ) -> OsparcVariableIdentifier: - return parse_obj_as(OsparcVariableIdentifier, osparc_variable_identifier_str) + return TypeAdapter(OsparcVariableIdentifier).validate_python( + osparc_variable_identifier_str + ) @pytest.mark.parametrize("invalid_var_name", INVALID_IDENTIFIERS) def test_osparc_variable_identifier_does_not_validate(invalid_var_name: str): with pytest.raises(ValidationError): - parse_obj_as(OsparcVariableIdentifier, invalid_var_name) + TypeAdapter(OsparcVariableIdentifier).validate_python(invalid_var_name) def test_raise_if_unresolved(identifier: OsparcVariableIdentifier): @@ -76,13 +78,19 @@ class Example(BaseModel): @pytest.mark.parametrize( "object_template", [ - parse_obj_as(OsparcVariableIdentifier, "$OSPARC_VARIABLE_1"), - [parse_obj_as(OsparcVariableIdentifier, "$OSPARC_VARIABLE_1")], - (parse_obj_as(OsparcVariableIdentifier, "$OSPARC_VARIABLE_1"),), - {parse_obj_as(OsparcVariableIdentifier, "$OSPARC_VARIABLE_1")}, - {"test": parse_obj_as(OsparcVariableIdentifier, "$OSPARC_VARIABLE_1")}, + TypeAdapter(OsparcVariableIdentifier).validate_python("$OSPARC_VARIABLE_1"), + [TypeAdapter(OsparcVariableIdentifier).validate_python("$OSPARC_VARIABLE_1")], + (TypeAdapter(OsparcVariableIdentifier).validate_python("$OSPARC_VARIABLE_1"),), + {TypeAdapter(OsparcVariableIdentifier).validate_python("$OSPARC_VARIABLE_1")}, + { + "test": TypeAdapter(OsparcVariableIdentifier).validate_python( + "$OSPARC_VARIABLE_1" + ) + }, Example( - nested_objects=parse_obj_as(OsparcVariableIdentifier, "$OSPARC_VARIABLE_1") + nested_objects=TypeAdapter(OsparcVariableIdentifier).validate_python( + "$OSPARC_VARIABLE_1" + ) ), ], ) @@ -147,6 +155,8 @@ def test_osparc_variable_name_and_default_value( expected_osparc_variable_name: str, expected_default_value: str | None, ): - osparc_variable_identifer = parse_obj_as(OsparcVariableIdentifier, str_identifier) + osparc_variable_identifer = TypeAdapter(OsparcVariableIdentifier).validate_python( + str_identifier + ) assert osparc_variable_identifer.name == expected_osparc_variable_name assert osparc_variable_identifer.default_value == expected_default_value diff --git a/packages/models-library/tests/test_project_networks.py b/packages/models-library/tests/test_project_networks.py index 0874cf5bd71..a929ac2a0aa 100644 --- a/packages/models-library/tests/test_project_networks.py +++ b/packages/models-library/tests/test_project_networks.py @@ -7,7 +7,7 @@ DockerNetworkName, NetworksWithAliases, ) -from pydantic import ValidationError, parse_obj_as +from pydantic import TypeAdapter, ValidationError @pytest.mark.parametrize( @@ -44,16 +44,16 
@@ def test_networks_with_aliases_fail(invalid_example: dict) -> None: @pytest.mark.parametrize("network_name", ["a", "ok", "a_", "A_", "a1", "a-"]) def test_projects_networks_validation(network_name: str) -> None: - assert parse_obj_as(DockerNetworkName, network_name) == network_name - assert parse_obj_as(DockerNetworkAlias, network_name) == network_name + assert TypeAdapter(DockerNetworkName).validate_python(network_name) == network_name + assert TypeAdapter(DockerNetworkAlias).validate_python(network_name) == network_name @pytest.mark.parametrize("network_name", ["", "1", "-", "_"]) def test_projects_networks_validation_fails(network_name: str) -> None: with pytest.raises(ValidationError): - parse_obj_as(DockerNetworkName, network_name) + TypeAdapter(DockerNetworkName).validate_python(network_name) with pytest.raises(ValidationError): - parse_obj_as(DockerNetworkAlias, network_name) + TypeAdapter(DockerNetworkAlias).validate_python(network_name) def test_class_constructors_fail() -> None: diff --git a/packages/models-library/tests/test_project_nodes_io.py b/packages/models-library/tests/test_project_nodes_io.py index aac9568eccb..9a191c7d674 100644 --- a/packages/models-library/tests/test_project_nodes_io.py +++ b/packages/models-library/tests/test_project_nodes_io.py @@ -12,7 +12,7 @@ SimCoreFileLink, SimcoreS3DirectoryID, ) -from pydantic import ValidationError, parse_obj_as +from pydantic import TypeAdapter, ValidationError @pytest.fixture() @@ -120,11 +120,13 @@ def test_store_discriminator(): def test_simcore_s3_directory_id(): # the only allowed path is the following - result = parse_obj_as(SimcoreS3DirectoryID, f"{UUID_0}/{UUID_0}/ok-simcore-dir/") + result = TypeAdapter(SimcoreS3DirectoryID).validate_python( + f"{UUID_0}/{UUID_0}/ok-simcore-dir/" + ) assert result == f"{UUID_0}/{UUID_0}/ok-simcore-dir/" # re-parsing must work the same thing works - assert parse_obj_as(SimcoreS3DirectoryID, result) + assert TypeAdapter(SimcoreS3DirectoryID).validate_python(result) # all below are not allowed for invalid_path in ( @@ -132,10 +134,12 @@ def test_simcore_s3_directory_id(): f"{UUID_0}/{UUID_0}/a-dir/a-file", ): with pytest.raises(ValidationError): - parse_obj_as(SimcoreS3DirectoryID, invalid_path) + TypeAdapter(SimcoreS3DirectoryID).validate_python(invalid_path) with pytest.raises(ValidationError, match="Not allowed subdirectory found in"): - parse_obj_as(SimcoreS3DirectoryID, f"{UUID_0}/{UUID_0}/a-dir/a-subdir/") + TypeAdapter(SimcoreS3DirectoryID).validate_python( + f"{UUID_0}/{UUID_0}/a-dir/a-subdir/" + ) @pytest.mark.parametrize( diff --git a/packages/models-library/tests/test_service_settings_labels.py b/packages/models-library/tests/test_service_settings_labels.py index 4e0a26553e2..0ae7824ac8a 100644 --- a/packages/models-library/tests/test_service_settings_labels.py +++ b/packages/models-library/tests/test_service_settings_labels.py @@ -31,7 +31,7 @@ ) from models_library.services_resources import DEFAULT_SINGLE_SERVICE_NAME from models_library.utils.string_substitution import TextTemplate -from pydantic import BaseModel, TypeAdapter, ValidationError, parse_obj_as +from pydantic import BaseModel, TypeAdapter, ValidationError from pydantic.json import pydantic_encoder @@ -526,17 +526,14 @@ def test_can_parse_labels_with_osparc_identifiers( nat_rule: NATRule = service_meta.containers_allowed_outgoing_permit_list[ "s4l-core" ][0] - assert nat_rule.hostname == parse_obj_as( - OsparcVariableIdentifier, + assert nat_rule.hostname == 
TypeAdapter(OsparcVariableIdentifier).validate_python( "${OSPARC_VARIABLE_VENDOR_SECRET_LICENSE_SERVER_HOSTNAME}", ) assert nat_rule.tcp_ports == [ - parse_obj_as( - OsparcVariableIdentifier, + TypeAdapter(OsparcVariableIdentifier).validate_python( "$OSPARC_VARIABLE_VENDOR_SECRET_TCP_PORTS_1", ), - parse_obj_as( - OsparcVariableIdentifier, + TypeAdapter(OsparcVariableIdentifier).validate_python( "$OSPARC_VARIABLE_VENDOR_SECRET_TCP_PORTS_2", ), 3, diff --git a/packages/models-library/tests/test_service_settings_nat_rule.py b/packages/models-library/tests/test_service_settings_nat_rule.py index 456b2a3403c..c6f9f05497c 100644 --- a/packages/models-library/tests/test_service_settings_nat_rule.py +++ b/packages/models-library/tests/test_service_settings_nat_rule.py @@ -9,7 +9,7 @@ replace_osparc_variable_identifier, ) from models_library.service_settings_nat_rule import NATRule -from pydantic import parse_obj_as +from pydantic import TypeAdapter SUPPORTED_TEMPLATES: set[str] = { "$OSPARC_VARIABLE_%s", @@ -79,7 +79,7 @@ def _all_combinations_from_dict(data: dict[Any, Any]) -> list[dict[Any, Any]]: def test_nat_rule_with_osparc_variable_identifier( nat_rule_dict: dict[str, Any], osparc_variables: dict[str, Any] ): - nat_rule = parse_obj_as(NATRule, nat_rule_dict) + nat_rule = TypeAdapter(NATRule).validate_python(nat_rule_dict) with pytest.raises(UnresolvedOsparcVariableIdentifierError): list(nat_rule.iter_tcp_ports()) @@ -108,7 +108,9 @@ def test_nat_rule_with_osparc_variable_identifier( ], ) def test_______(replace_with_value: Any): - a_var = parse_obj_as(OsparcVariableIdentifier, "$OSPARC_VARIABLE_some_var") + a_var = TypeAdapter(OsparcVariableIdentifier).validate_python( + "$OSPARC_VARIABLE_some_var" + ) assert isinstance(a_var, OsparcVariableIdentifier) replaced_var = replace_osparc_variable_identifier( @@ -151,7 +153,7 @@ def test_replace_an_instance_of_osparc_variable_identifier( except TypeError: formatted_template = var_template - a_var = parse_obj_as(OsparcVariableIdentifier, formatted_template) + a_var = TypeAdapter(OsparcVariableIdentifier).validate_python(formatted_template) assert isinstance(a_var, OsparcVariableIdentifier) replace_with_identifier_default = identifier_has_default and replace_with_default diff --git a/packages/models-library/tests/test_user_preferences.py b/packages/models-library/tests/test_user_preferences.py index 06e4af9b149..f90d2cd482a 100644 --- a/packages/models-library/tests/test_user_preferences.py +++ b/packages/models-library/tests/test_user_preferences.py @@ -15,20 +15,24 @@ _AutoRegisterMeta, _BaseUserPreferenceModel, ) -from pydantic import parse_obj_as +from pydantic import TypeAdapter _SERVICE_KEY_AND_VERSION_SAMPLES: list[tuple[ServiceKey, ServiceVersion]] = [ ( - parse_obj_as(ServiceKey, "simcore/services/comp/something-1231"), - parse_obj_as(ServiceVersion, "0.0.1"), + TypeAdapter(ServiceKey).validate_python("simcore/services/comp/something-1231"), + TypeAdapter(ServiceVersion).validate_python("0.0.1"), ), ( - parse_obj_as(ServiceKey, "simcore/services/dynamic/something-1231"), - parse_obj_as(ServiceVersion, "0.0.1"), + TypeAdapter(ServiceKey).validate_python( + "simcore/services/dynamic/something-1231" + ), + TypeAdapter(ServiceVersion).validate_python("0.0.1"), ), ( - parse_obj_as(ServiceKey, "simcore/services/frontend/something-1231"), - parse_obj_as(ServiceVersion, "0.0.1"), + TypeAdapter(ServiceKey).validate_python( + "simcore/services/frontend/something-1231" + ), + TypeAdapter(ServiceVersion).validate_python("0.0.1"), ), ] @@ -54,7 +58,7 @@ 
def test_base_user_preference_model(value: Any, preference_type: PreferenceType) base_data = _get_base_user_preferences_data( preference_type=preference_type, value=value ) - assert parse_obj_as(_BaseUserPreferenceModel, base_data) + assert TypeAdapter(_BaseUserPreferenceModel).validate_python(base_data) def test_frontend_preferences(value: Any): @@ -64,7 +68,7 @@ def test_frontend_preferences(value: Any): base_data.update({"preference_identifier": "pref-name"}) # check serialization - frontend_preference = parse_obj_as(FrontendUserPreference, base_data) + frontend_preference = TypeAdapter(FrontendUserPreference).validate_python(base_data) assert set(frontend_preference.to_db().keys()) == {"value"} @@ -80,7 +84,7 @@ def test_user_service_preferences(value: Any, mock_file_path: Path): "file_path": mock_file_path, } ) - instance = parse_obj_as(UserServiceUserPreference, base_data) + instance = TypeAdapter(UserServiceUserPreference).validate_python(base_data) assert set(instance.to_db().keys()) == { "value", "service_key", diff --git a/packages/models-library/tests/test_utils_service_io.py b/packages/models-library/tests/test_utils_service_io.py index 15ea8cfcae8..7ef8d4070a0 100644 --- a/packages/models-library/tests/test_utils_service_io.py +++ b/packages/models-library/tests/test_utils_service_io.py @@ -17,7 +17,7 @@ from models_library.services import ServiceInput, ServiceOutput, ServicePortKey from models_library.utils.json_schema import jsonschema_validate_schema from models_library.utils.services_io import get_service_io_json_schema -from pydantic import parse_obj_as +from pydantic import TypeAdapter example_inputs_labels = [ e for e in ServiceInput.model_config["json_schema_extra"]["examples"] if e["label"] @@ -73,8 +73,12 @@ def test_against_service_metadata_configs(metadata_path: Path): meta = json.loads(metadata_path.read_text()) - inputs = parse_obj_as(dict[ServicePortKey, ServiceInput], meta["inputs"]) - outputs = parse_obj_as(dict[ServicePortKey, ServiceOutput], meta["outputs"]) + inputs = TypeAdapter(dict[ServicePortKey, ServiceInput]).validate_python( + meta["inputs"] + ) + outputs = TypeAdapter(dict[ServicePortKey, ServiceOutput]).validate_python( + meta["outputs"] + ) for port in itertools.chain(inputs.values(), outputs.values()): schema = get_service_io_json_schema(port) diff --git a/packages/models-library/tests/test_utils_specs_substitution.py b/packages/models-library/tests/test_utils_specs_substitution.py index 0670e56e271..c523271bd2a 100644 --- a/packages/models-library/tests/test_utils_specs_substitution.py +++ b/packages/models-library/tests/test_utils_specs_substitution.py @@ -12,7 +12,7 @@ SpecsSubstitutionsResolver, SubstitutionValue, ) -from pydantic import parse_obj_as +from pydantic import TypeAdapter @pytest.fixture() @@ -49,7 +49,7 @@ def available_osparc_variables( "SERVICE_VERSION": service_version, "DISPLAY": "True", } - return parse_obj_as(dict[str, SubstitutionValue], environs) + return TypeAdapter(dict[str, SubstitutionValue]).validate_python(environs) @pytest.mark.parametrize( From ceb9f69a7ddce681e6263c993c8e5d2428b548a0 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 19 Sep 2024 09:47:51 +0200 Subject: [PATCH 021/280] update model config access --- .../tests/container_tasks/test_docker.py | 2 +- .../tests/container_tasks/test_events.py | 4 ++-- .../tests/container_tasks/test_io.py | 4 +++- .../tests/container_tasks/test_protocol.py | 6 +++-- .../tests/email/test_email_events.py | 2 +- .../helpers/webserver_projects.py | 6 ++--- 
.../src/pytest_simcore/pydantic_models.py | 2 +- .../services_api_mocks_for_aiohttp_clients.py | 22 +++++++++++++------ .../tests/unit/test_node_ports_v2_port.py | 3 ++- .../tests/unit/test_storage_client.py | 2 +- 10 files changed, 33 insertions(+), 20 deletions(-) diff --git a/packages/dask-task-models-library/tests/container_tasks/test_docker.py b/packages/dask-task-models-library/tests/container_tasks/test_docker.py index 307fe175547..4eb5bc74980 100644 --- a/packages/dask-task-models-library/tests/container_tasks/test_docker.py +++ b/packages/dask-task-models-library/tests/container_tasks/test_docker.py @@ -4,7 +4,7 @@ @pytest.mark.parametrize("model_cls", [(DockerBasicAuth)]) def test_docker_models_examples(model_cls): - examples = model_cls.Config.schema_extra["examples"] + examples = model_cls.model_config["json_schema_extra"]["examples"] for index, example in enumerate(examples): print(f"{index:-^10}:\n", example) diff --git a/packages/dask-task-models-library/tests/container_tasks/test_events.py b/packages/dask-task-models-library/tests/container_tasks/test_events.py index 16a308e11e0..1aa4139720d 100644 --- a/packages/dask-task-models-library/tests/container_tasks/test_events.py +++ b/packages/dask-task-models-library/tests/container_tasks/test_events.py @@ -26,7 +26,7 @@ def test_task_event_abstract(): @pytest.mark.parametrize("model_cls", [TaskProgressEvent, TaskLogEvent]) def test_events_models_examples(model_cls): - examples = model_cls.Config.schema_extra["examples"] + examples = model_cls.model_config["json_schema_extra"]["examples"] for index, example in enumerate(examples): print(f"{index:-^10}:\n", example) @@ -51,7 +51,7 @@ def mocked_dask_worker_job_id(mocker: MockerFixture, job_id: str) -> str: return job_id -@pytest.fixture(params=TaskOwner.Config.schema_extra["examples"]) +@pytest.fixture(params=TaskOwner.model_config["json_schema_extra"]["examples"]) def task_owner(request: pytest.FixtureRequest) -> TaskOwner: return TaskOwner(**request.param) diff --git a/packages/dask-task-models-library/tests/container_tasks/test_io.py b/packages/dask-task-models-library/tests/container_tasks/test_io.py index 14527d92391..d0b4cb196ed 100644 --- a/packages/dask-task-models-library/tests/container_tasks/test_io.py +++ b/packages/dask-task-models-library/tests/container_tasks/test_io.py @@ -69,7 +69,9 @@ def _create_fake_outputs( def test_create_task_output_from_task_with_optional_fields_as_required( tmp_path: Path, optional_fields_set: bool, faker: Faker ): - for schema_example in TaskOutputDataSchema.Config.schema_extra["examples"]: + for schema_example in TaskOutputDataSchema.model_config["json_schema_extra"][ + "examples" + ]: task_output_schema = TaskOutputDataSchema.parse_obj(schema_example) outputs_file_name = _create_fake_outputs( diff --git a/packages/dask-task-models-library/tests/container_tasks/test_protocol.py b/packages/dask-task-models-library/tests/container_tasks/test_protocol.py index d17202adabd..3c70924a043 100644 --- a/packages/dask-task-models-library/tests/container_tasks/test_protocol.py +++ b/packages/dask-task-models-library/tests/container_tasks/test_protocol.py @@ -9,7 +9,7 @@ @pytest.mark.parametrize("model_cls", [TaskOwner, ContainerTaskParameters]) def test_events_models_examples(model_cls): - examples = model_cls.Config.schema_extra["examples"] + examples = model_cls.model_config["json_schema_extra"]["examples"] for index, example in enumerate(examples): print(f"{index:-^10}:\n", example) @@ -19,7 +19,9 @@ def test_events_models_examples(model_cls): 
def test_task_owner_parent_valid(faker: Faker): - invalid_task_owner_example = TaskOwner.Config.schema_extra["examples"][0] + invalid_task_owner_example = TaskOwner.model_config["json_schema_extra"][ + "examples" + ][0] invalid_task_owner_example["parent_project_id"] = faker.uuid4() assert invalid_task_owner_example["parent_node_id"] is None with pytest.raises(ValidationError, match=r".+ are None or both are set!"): diff --git a/packages/notifications-library/tests/email/test_email_events.py b/packages/notifications-library/tests/email/test_email_events.py index 5e3786ab234..9a1b383522f 100644 --- a/packages/notifications-library/tests/email/test_email_events.py +++ b/packages/notifications-library/tests/email/test_email_events.py @@ -66,7 +66,7 @@ def ipinfo(faker: Faker) -> dict[str, Any]: @pytest.fixture def request_form(faker: Faker) -> dict[str, Any]: return AccountRequestInfo( - **AccountRequestInfo.Config.schema_extra["example"] + **AccountRequestInfo.model_config["json_schema_extra"]["example"] ).dict() diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_projects.py b/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_projects.py index b6687e22239..a190fa6900e 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_projects.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_projects.py @@ -78,9 +78,9 @@ async def create_project( project_nodes={ NodeID(node_id): ProjectNodeCreate( node_id=NodeID(node_id), - required_resources=ServiceResourcesDictHelpers.Config.schema_extra[ - "examples" - ][0], + required_resources=ServiceResourcesDictHelpers.model_config[ + "json_schema_extra" + ]["examples"][0], ) for node_id in project_data.get("workbench", {}) }, diff --git a/packages/pytest-simcore/src/pytest_simcore/pydantic_models.py b/packages/pytest-simcore/src/pytest_simcore/pydantic_models.py index 297e9a9ab13..035e793ccbf 100644 --- a/packages/pytest-simcore/src/pytest_simcore/pydantic_models.py +++ b/packages/pytest-simcore/src/pytest_simcore/pydantic_models.py @@ -51,7 +51,7 @@ def walk_model_examples_in_package(package: ModuleType) -> Iterator[ModelExample def iter_model_examples_in_module(module: object) -> Iterator[ModelExample]: - """Iterates on all examples defined as BaseModelClass.Config.schema_extra["example"] + """Iterates on all examples defined as BaseModelClass.model_config["json_schema_extra"]["example"] Usage: diff --git a/packages/pytest-simcore/src/pytest_simcore/services_api_mocks_for_aiohttp_clients.py b/packages/pytest-simcore/src/pytest_simcore/services_api_mocks_for_aiohttp_clients.py index dffe3883c61..704f1a33990 100644 --- a/packages/pytest-simcore/src/pytest_simcore/services_api_mocks_for_aiohttp_clients.py +++ b/packages/pytest-simcore/src/pytest_simcore/services_api_mocks_for_aiohttp_clients.py @@ -108,7 +108,7 @@ def create_computation_cb(url, **kwargs) -> CallbackResult: ], } returned_computation = ComputationTask.parse_obj( - ComputationTask.Config.schema_extra["examples"][0] + ComputationTask.model_config["json_schema_extra"]["examples"][0] ).copy( update={ "id": f"{kwargs['json']['project_id']}", @@ -132,7 +132,7 @@ def get_computation_cb(url, **kwargs) -> CallbackResult: pipeline: dict[str, list[str]] = FULL_PROJECT_PIPELINE_ADJACENCY node_states = FULL_PROJECT_NODE_STATES returned_computation = ComputationTask.parse_obj( - ComputationTask.Config.schema_extra["examples"][0] + ComputationTask.model_config["json_schema_extra"]["examples"][0] ).copy( update={ "id": Path(url.path).name, @@ 
-155,7 +155,7 @@ def create_cluster_cb(url, **kwargs) -> CallbackResult: assert "json" in kwargs, f"missing body in call to {url}" assert url.query.get("user_id") random_cluster = Cluster.parse_obj( - random.choice(Cluster.Config.schema_extra["examples"]) + random.choice(Cluster.model_config["json_schema_extra"]["examples"]) ) return CallbackResult( status=201, payload=json.loads(random_cluster.json(by_alias=True)) @@ -170,7 +170,9 @@ def list_clusters_cb(url, **kwargs) -> CallbackResult: [ json.loads( Cluster.parse_obj( - random.choice(Cluster.Config.schema_extra["examples"]) + random.choice( + Cluster.model_config["json_schema_extra"]["examples"] + ) ).json(by_alias=True) ) for _ in range(3) @@ -187,7 +189,9 @@ def get_cluster_cb(url, **kwargs) -> CallbackResult: payload=json.loads( Cluster.parse_obj( { - **random.choice(Cluster.Config.schema_extra["examples"]), + **random.choice( + Cluster.model_config["json_schema_extra"]["examples"] + ), **{"id": cluster_id}, } ).json(by_alias=True) @@ -216,7 +220,9 @@ def patch_cluster_cb(url, **kwargs) -> CallbackResult: payload=json.loads( Cluster.parse_obj( { - **random.choice(Cluster.Config.schema_extra["examples"]), + **random.choice( + Cluster.model_config["json_schema_extra"]["examples"] + ), **{"id": cluster_id}, } ).json(by_alias=True) @@ -436,7 +442,9 @@ async def storage_v0_service_mock( aioresponses_mocker.get( get_file_metadata_pattern, status=status.HTTP_200_OK, - payload={"data": FileMetaDataGet.Config.schema_extra["examples"][0]}, + payload={ + "data": FileMetaDataGet.model_config["json_schema_extra"]["examples"][0] + }, repeat=True, ) aioresponses_mocker.get( diff --git a/packages/simcore-sdk/tests/unit/test_node_ports_v2_port.py b/packages/simcore-sdk/tests/unit/test_node_ports_v2_port.py index 8485e19b74b..125bd02d45d 100644 --- a/packages/simcore-sdk/tests/unit/test_node_ports_v2_port.py +++ b/packages/simcore-sdk/tests/unit/test_node_ports_v2_port.py @@ -219,7 +219,8 @@ async def mock_filemanager(mocker: MockerFixture, e_tag: str, faker: Faker) -> N mocker.patch( "simcore_sdk.node_ports_common.filemanager._get_file_meta_data", return_value=parse_obj_as( - FileMetaDataGet, FileMetaDataGet.Config.schema_extra["examples"][0] + FileMetaDataGet, + FileMetaDataGet.model_config["json_schema_extra"]["examples"][0], ), ) mocker.patch( diff --git a/packages/simcore-sdk/tests/unit/test_storage_client.py b/packages/simcore-sdk/tests/unit/test_storage_client.py index 91e46c5bd61..fdeefc66dda 100644 --- a/packages/simcore-sdk/tests/unit/test_storage_client.py +++ b/packages/simcore-sdk/tests/unit/test_storage_client.py @@ -177,7 +177,7 @@ async def test_get_file_metada( ) assert file_metadata assert file_metadata == FileMetaDataGet.parse_obj( - FileMetaDataGet.Config.schema_extra["examples"][0] + FileMetaDataGet.model_config["json_schema_extra"]["examples"][0] ) From e0bc99bf26d7ba83a5c403576453105131031039 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 19 Sep 2024 17:56:48 +0200 Subject: [PATCH 022/280] update pydantic settings test --- .../tests/test__pydantic_settings.py | 85 +++++++++---------- 1 file changed, 41 insertions(+), 44 deletions(-) diff --git a/packages/settings-library/tests/test__pydantic_settings.py b/packages/settings-library/tests/test__pydantic_settings.py index 44168ff636a..db098496af6 100644 --- a/packages/settings-library/tests/test__pydantic_settings.py +++ b/packages/settings-library/tests/test__pydantic_settings.py @@ -12,48 +12,50 @@ """ - -from pydantic import validator -from pydantic.fields import 
ModelField, Undefined +from pydantic import ValidationInfo, field_validator +from pydantic.fields import PydanticUndefined from pydantic_settings import BaseSettings +from settings_library.base import allows_none def assert_field_specs( - model_cls, name, is_required, is_nullable, explicit_default, defaults + model_cls: type[BaseSettings], + name: str, + is_required: bool, + is_nullable: bool, + explicit_default, ): - field: ModelField = model_cls.__fields__[name] - print(field, field.field_info) + info = model_cls.model_fields[name] + print(info) - assert field.required == is_required - assert field.allow_none == is_nullable - assert field.field_info.default == explicit_default + assert info.is_required() == is_required + assert allows_none(info) == is_nullable - assert field.default == defaults - if field.required: + if info.is_required(): # in this case, default is not really used - assert field.default is None + assert info.default is PydanticUndefined + else: + assert info.default == explicit_default class Settings(BaseSettings): VALUE: int VALUE_DEFAULT: int = 42 - VALUE_NULLABLE_REQUIRED: int | None = ... # type: ignore - VALUE_NULLABLE_OPTIONAL: int | None + VALUE_NULLABLE_REQUIRED: int | None = ... # type: ignore[assignment] + VALUE_NULLABLE_REQUIRED_AS_WELL: int | None VALUE_NULLABLE_DEFAULT_VALUE: int | None = 42 VALUE_NULLABLE_DEFAULT_NULL: int | None = None # Other ways to write down "required" is using ... - VALUE_ALSO_REQUIRED: int = ... # type: ignore + VALUE_REQUIRED_AS_WELL: int = ... # type: ignore[assignment] - # TODO[pydantic]: We couldn't refactor the `validator`, please replace it by `field_validator` manually. - # Check https://docs.pydantic.dev/dev-v2/migration/#changes-to-validators for more information. - @validator("*", pre=True) + @field_validator("*", mode="before") @classmethod - def parse_none(cls, v, values, field: ModelField): + def parse_none(cls, v, info: ValidationInfo): # WARNING: In nullable fields, envs equal to null or none are parsed as None !! 
- if field.allow_none: + if info.field_name and allows_none(cls.model_fields[info.field_name]): if isinstance(v, str) and v.lower() in ("null", "none"): return None return v @@ -69,8 +71,7 @@ def test_fields_declarations(): "VALUE", is_required=True, is_nullable=False, - explicit_default=Undefined, - defaults=None, + explicit_default=PydanticUndefined, ) assert_field_specs( @@ -79,7 +80,6 @@ def test_fields_declarations(): is_required=False, is_nullable=False, explicit_default=42, - defaults=42, ) assert_field_specs( @@ -88,16 +88,14 @@ def test_fields_declarations(): is_required=True, is_nullable=True, explicit_default=Ellipsis, - defaults=None, ) assert_field_specs( Settings, - "VALUE_NULLABLE_OPTIONAL", - is_required=False, + "VALUE_NULLABLE_REQUIRED_AS_WELL", + is_required=True, is_nullable=True, - explicit_default=Undefined, # <- difference wrt VALUE_NULLABLE_DEFAULT_NULL - defaults=None, + explicit_default=PydanticUndefined, # <- difference wrt VALUE_NULLABLE_DEFAULT_NULL ) # VALUE_NULLABLE_OPTIONAL interpretation has always been confusing @@ -110,7 +108,6 @@ def test_fields_declarations(): is_required=False, is_nullable=True, explicit_default=42, - defaults=42, ) assert_field_specs( @@ -118,43 +115,43 @@ def test_fields_declarations(): "VALUE_NULLABLE_DEFAULT_NULL", is_required=False, is_nullable=True, - explicit_default=None, # <- difference wrt VALUE_NULLABLE_OPTIONAL - defaults=None, + explicit_default=None, ) assert_field_specs( Settings, - "VALUE_ALSO_REQUIRED", + "VALUE_REQUIRED_AS_WELL", is_required=True, is_nullable=False, explicit_default=Ellipsis, - defaults=None, ) def test_construct(monkeypatch): # from __init__ settings_from_init = Settings( - VALUE=1, VALUE_ALSO_REQUIRED=10, VALUE_NULLABLE_REQUIRED=None + VALUE=1, + VALUE_NULLABLE_REQUIRED=None, + VALUE_NULLABLE_REQUIRED_AS_WELL=None, + VALUE_REQUIRED_AS_WELL=10, ) - print(settings_from_init.json(exclude_unset=True, indent=1)) + print(settings_from_init.model_dump_json(exclude_unset=True, indent=1)) # from env vars monkeypatch.setenv("VALUE", "1") - monkeypatch.setenv("VALUE_ALSO_REQUIRED", "10") - monkeypatch.setenv( - "VALUE_NULLABLE_REQUIRED", "null" - ) # WARNING: set this env to None would not work w/o ``parse_none`` validator! bug??? 
+ monkeypatch.setenv("VALUE_REQUIRED_AS_WELL", "10") + monkeypatch.setenv("VALUE_NULLABLE_REQUIRED", "null") + monkeypatch.setenv("VALUE_NULLABLE_REQUIRED_AS_WELL", None) - settings_from_env = Settings() - print(settings_from_env.json(exclude_unset=True, indent=1)) + settings_from_env = Settings() # type: ignore[call-arg] + print(settings_from_env.model_dump_json(exclude_unset=True, indent=1)) assert settings_from_init == settings_from_env # mixed - settings_from_both = Settings(VALUE_NULLABLE_REQUIRED=3) - print(settings_from_both.json(exclude_unset=True, indent=1)) + settings_from_both = Settings(VALUE_NULLABLE_REQUIRED=3) # type: ignore[call-arg] + print(settings_from_both.model_dump_json(exclude_unset=True, indent=1)) - assert settings_from_both == settings_from_init.copy( + assert settings_from_both == settings_from_init.model_copy( update={"VALUE_NULLABLE_REQUIRED": 3} ) From 18b7e377bede0bb2cc9637485f359bfd3fa3f6f3 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 19 Sep 2024 18:40:01 +0200 Subject: [PATCH 023/280] upgrade base module --- .../src/settings_library/base.py | 192 +++++++++--------- 1 file changed, 100 insertions(+), 92 deletions(-) diff --git a/packages/settings-library/src/settings_library/base.py b/packages/settings-library/src/settings_library/base.py index 60aeb076954..7e3ca51dfb0 100644 --- a/packages/settings-library/src/settings_library/base.py +++ b/packages/settings-library/src/settings_library/base.py @@ -1,13 +1,11 @@ import logging -from collections.abc import Sequence from functools import cached_property -from typing import Final, get_args, get_origin +from types import UnionType +from typing import Any, Final, get_args, get_origin -from pydantic import BaseConfig, ConfigError, Extra, ValidationError, field_validator -from pydantic.error_wrappers import ErrorList, ErrorWrapper -from pydantic.fields import ModelField, Undefined -from pydantic.typing import is_literal_type -from pydantic_settings import BaseSettings +from pydantic import PydanticUserError, ValidationInfo, field_validator +from pydantic.fields import FieldInfo +from pydantic_settings import BaseSettings, SettingsConfigDict _logger = logging.getLogger(__name__) @@ -16,43 +14,50 @@ ] = "%s auto_default_from_env unresolved, defaulting to None" -class DefaultFromEnvFactoryError(ValidationError): +class DefaultFromEnvFactoryError(PydanticUserError): ... -def create_settings_from_env(field: ModelField): - # NOTE: Cannot pass only field.type_ because @prepare_field (when this function is called) - # this value is still not resolved (field.type_ at that moment has a weak_ref). - # Therefore we keep the entire 'field' but MUST be treated here as read-only +def allows_none(info: FieldInfo) -> bool: + origin = get_origin(info.annotation) # X | None or Optional[X] will return Union + if origin is UnionType: + return any(x in get_args(info.annotation) for x in (type(None), Any)) + return False - def _default_factory(): - """Creates default from sub-settings or None (if nullable)""" - field_settings_cls = field.type_ - try: - return field_settings_cls() - except ValidationError as err: - if field.allow_none: - # e.g. Optional[PostgresSettings] would warn if defaults to None - _logger.warning( - _DEFAULTS_TO_NONE_MSG, - field.name, - ) - return None +# def create_settings_from_env(field): +# # NOTE: Cannot pass only field.type_ because @prepare_field (when this function is called) +# # this value is still not resolved (field.type_ at that moment has a weak_ref). 
+# # Therefore we keep the entire 'field' but MUST be treated here as read-only - def _prepend_field_name(ee: ErrorList): - if isinstance(ee, ErrorWrapper): - return ErrorWrapper(ee.exc, (field.name, *ee.loc_tuple())) - assert isinstance(ee, Sequence) # nosec - return [_prepend_field_name(e) for e in ee] +# def _default_factory(): +# """Creates default from sub-settings or None (if nullable)""" +# field_settings_cls = field.type_ +# try: +# return field_settings_cls() - raise DefaultFromEnvFactoryError( - errors=_prepend_field_name(err.raw_errors), - model=err.model, - # FIXME: model = shall be the parent settings?? but I dont find how retrieve it from the field - ) from err +# except ValidationError as err: +# if field.allow_none: +# # e.g. Optional[PostgresSettings] would warn if defaults to None +# _logger.warning( +# _DEFAULTS_TO_NONE_MSG, +# field.name, +# ) +# return None - return _default_factory +# def _prepend_field_name(ee: ErrorList): +# if isinstance(ee, ErrorWrapper): +# return ErrorWrapper(ee.exc, (field.name, *ee.loc_tuple())) +# assert isinstance(ee, Sequence) # nosec +# return [_prepend_field_name(e) for e in ee] + +# raise DefaultFromEnvFactoryError( +# errors=_prepend_field_name(err.raw_errors), +# model=err.model, +# # FIXME: model = shall be the parent settings?? but I dont find how retrieve it from the field +# ) from err + +# return _default_factory class BaseCustomSettings(BaseSettings): @@ -66,66 +71,69 @@ class BaseCustomSettings(BaseSettings): @field_validator("*", mode="before") @classmethod - def parse_none(cls, v, field: ModelField): + def parse_none(cls, v, info: ValidationInfo): # WARNING: In nullable fields, envs equal to null or none are parsed as None !! - if field.allow_none and isinstance(v, str) and v.lower() in ("null", "none"): + if ( + info.field_name + and allows_none(cls.model_fields[info.field_name]) + and isinstance(v, str) + and v.lower() in ("null", "none") + ): return None return v - # TODO[pydantic]: The `Config` class inherits from another class, please create the `model_config` manually. - # Check https://docs.pydantic.dev/dev-v2/migration/#changes-to-config for more information. - class Config(BaseConfig): - case_sensitive = True # All must be capitalized - extra = Extra.forbid - allow_mutation = False - frozen = True - validate_all = True - keep_untouched = (cached_property,) - - @classmethod - def prepare_field(cls, field: ModelField) -> None: - super().prepare_field(field) - - auto_default_from_env = field.field_info.extra.get( - "auto_default_from_env", False - ) - - field_type = field.type_ - if args := get_args(field_type): - field_type = next(a for a in args if a != type(None)) - - # Avoids issubclass raising TypeError. 
SEE test_issubclass_type_error_with_pydantic_models - is_not_composed = ( - get_origin(field_type) is None - ) # is not composed as dict[str, Any] or Generic[Base] - # avoid literals raising TypeError - is_not_literal = is_literal_type(field.type_) is False - - if ( - is_not_literal - and is_not_composed - and issubclass(field_type, BaseCustomSettings) - ): - if auto_default_from_env: - assert field.field_info.default is Undefined - assert field.field_info.default_factory is None - - # Transform it into something like `Field(default_factory=create_settings_from_env(field))` - field.default_factory = create_settings_from_env(field) - field.default = None - field.required = False # has a default now - - elif ( - is_not_literal - and is_not_composed - and issubclass(field_type, BaseSettings) - ): - msg = f"{cls}.{field.name} of type {field_type} must inherit from BaseCustomSettings" - raise ConfigError(msg) - - elif auto_default_from_env: - msg = f"auto_default_from_env=True can only be used in BaseCustomSettings subclassesbut field {cls}.{field.name} is {field_type} " - raise ConfigError(msg) + model_config = SettingsConfigDict( + case_sensitive=True, # All must be capitalized + extra="forbid", + frozen=True, + validate_default=True, + ignored_types=(cached_property,), + ) + + # @classmethod + # def prepare_field(cls, field: ModelField) -> None: + # super().prepare_field(field) + + # auto_default_from_env = field.field_info.extra.get( + # "auto_default_from_env", False + # ) + + # field_type = field.type_ + # if args := get_args(field_type): + # field_type = next(a for a in args if a != type(None)) + + # # Avoids issubclass raising TypeError. SEE test_issubclass_type_error_with_pydantic_models + # is_not_composed = ( + # get_origin(field_type) is None + # ) # is not composed as dict[str, Any] or Generic[Base] + # # avoid literals raising TypeError + # is_not_literal = is_literal_type(field.type_) is False + + # if ( + # is_not_literal + # and is_not_composed + # and issubclass(field_type, BaseCustomSettings) + # ): + # if auto_default_from_env: + # assert field.field_info.default is Undefined + # assert field.field_info.default_factory is None + + # # Transform it into something like `Field(default_factory=create_settings_from_env(field))` + # field.default_factory = create_settings_from_env(field) + # field.default = None + # field.required = False # has a default now + + # elif ( + # is_not_literal + # and is_not_composed + # and issubclass(field_type, BaseSettings) + # ): + # msg = f"{cls}.{field.name} of type {field_type} must inherit from BaseCustomSettings" + # raise ConfigError(msg) + + # elif auto_default_from_env: + # msg = f"auto_default_from_env=True can only be used in BaseCustomSettings subclassesbut field {cls}.{field.name} is {field_type} " + # raise ConfigError(msg) @classmethod def create_from_envs(cls, **overrides): From dc3d6021cb1883104f5e763daa073fac0a7ce519 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 19 Sep 2024 19:30:41 +0200 Subject: [PATCH 024/280] fix ByteSize values Since Pydantic v2, ByteSize are validated and must be >= 0. 
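A minimal sketch of the behaviour these test changes rely on (assumes pydantic>=2; the
Resources class below is a simplified stand-in for the real aws_library Resources model
and is not part of this diff): validated inputs must now be non-negative, while
model_construct() (construct() is the deprecated v1-style alias still used in the tests)
bypasses validation, which is why the expected results keep using it.

    from pydantic import BaseModel, ByteSize, TypeAdapter, ValidationError

    class Resources(BaseModel):
        # simplified stand-in for the real Resources model
        cpus: float
        ram: ByteSize

    try:
        TypeAdapter(ByteSize).validate_python(-1)  # negative sizes now fail validation
    except ValidationError as err:
        print(err.errors()[0]["type"])

    # model_construct() skips validation, so a negative delta can still be
    # represented in the expected test results
    print(Resources.model_construct(cpus=-0.9, ram=ByteSize(33)))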
--- packages/aws-library/tests/test_ec2_models.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/packages/aws-library/tests/test_ec2_models.py b/packages/aws-library/tests/test_ec2_models.py index f7c114932be..cfbcaa17c4c 100644 --- a/packages/aws-library/tests/test_ec2_models.py +++ b/packages/aws-library/tests/test_ec2_models.py @@ -88,9 +88,9 @@ def test_resources_gt_operator(a: Resources, b: Resources, a_greater_than_b: boo Resources(cpus=1, ram=ByteSize(34)), ), ( - Resources(cpus=0.1, ram=ByteSize(-1)), + Resources(cpus=0.1, ram=ByteSize(1)), Resources(cpus=1, ram=ByteSize(34)), - Resources(cpus=1.1, ram=ByteSize(33)), + Resources(cpus=1.1, ram=ByteSize(35)), ), ], ) @@ -108,14 +108,14 @@ def test_resources_create_as_empty(): "a,b,result", [ ( - Resources(cpus=0, ram=ByteSize(0)), - Resources(cpus=1, ram=ByteSize(34)), - Resources.construct(cpus=-1, ram=ByteSize(-34)), + Resources(cpus=0, ram=ByteSize(34)), + Resources(cpus=1, ram=ByteSize(0)), + Resources.construct(cpus=-1, ram=ByteSize(34)), ), ( - Resources(cpus=0.1, ram=ByteSize(-1)), - Resources(cpus=1, ram=ByteSize(34)), - Resources.construct(cpus=-0.9, ram=ByteSize(-35)), + Resources(cpus=0.1, ram=ByteSize(34)), + Resources(cpus=1, ram=ByteSize(1)), + Resources.construct(cpus=-0.9, ram=ByteSize(33)), ), ], ) From 724bed4285931f4ce44d01aeec2255da4d3ab240 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 19 Sep 2024 19:53:30 +0200 Subject: [PATCH 025/280] replace deprecated parse_obj_as method --- .../aws-library/src/aws_library/s3/_client.py | 13 ++-- .../src/aws_library/s3/_constants.py | 14 ++-- .../aws-library/src/aws_library/s3/_utils.py | 4 +- packages/aws-library/tests/test_ec2_models.py | 6 +- packages/aws-library/tests/test_s3_client.py | 68 ++++++++++--------- packages/aws-library/tests/test_s3_utils.py | 65 ++++++++++++++---- 6 files changed, 108 insertions(+), 62 deletions(-) diff --git a/packages/aws-library/src/aws_library/s3/_client.py b/packages/aws-library/src/aws_library/s3/_client.py index 4ddb2bfb9c2..ef83ee40d79 100644 --- a/packages/aws-library/src/aws_library/s3/_client.py +++ b/packages/aws-library/src/aws_library/s3/_client.py @@ -15,7 +15,7 @@ from botocore.client import Config from models_library.api_schemas_storage import ETag, S3BucketName, UploadedPart from models_library.basic_types import SHA256Str -from pydantic import AnyUrl, ByteSize, parse_obj_as +from pydantic import AnyUrl, ByteSize, TypeAdapter from servicelib.logging_utils import log_catch, log_context from servicelib.utils import limited_gather from settings_library.s3 import S3Settings @@ -260,7 +260,7 @@ async def create_single_presigned_download_link( Params={"Bucket": bucket, "Key": object_key}, ExpiresIn=expiration_secs, ) - url: AnyUrl = parse_obj_as(AnyUrl, generated_link) + url: AnyUrl = TypeAdapter(AnyUrl).validate_python(generated_link) return url @s3_exception_handler(_logger) @@ -274,7 +274,7 @@ async def create_single_presigned_upload_link( Params={"Bucket": bucket, "Key": object_key}, ExpiresIn=expiration_secs, ) - url: AnyUrl = parse_obj_as(AnyUrl, generated_link) + url: AnyUrl = TypeAdapter(AnyUrl).validate_python(generated_link) return url @s3_exception_handler(_logger) @@ -298,8 +298,7 @@ async def create_multipart_upload_links( # compute the number of links, based on the announced file size num_upload_links, chunk_size = compute_num_file_chunks(file_size) # now create the links - upload_links = parse_obj_as( - list[AnyUrl], + upload_links = 
TypeAdapter(list[AnyUrl]).validate_python( await asyncio.gather( *( self._client.generate_presigned_url( @@ -473,7 +472,7 @@ def is_multipart(file_size: ByteSize) -> bool: @staticmethod def compute_s3_url(*, bucket: S3BucketName, object_key: S3ObjectKey) -> AnyUrl: - url: AnyUrl = parse_obj_as( - AnyUrl, f"s3://{bucket}/{urllib.parse.quote(object_key)}" + url: AnyUrl = TypeAdapter(AnyUrl).validate_python( + f"s3://{bucket}/{urllib.parse.quote(object_key)}" ) return url diff --git a/packages/aws-library/src/aws_library/s3/_constants.py b/packages/aws-library/src/aws_library/s3/_constants.py index 05f2b3dc6d6..a94cd555f43 100644 --- a/packages/aws-library/src/aws_library/s3/_constants.py +++ b/packages/aws-library/src/aws_library/s3/_constants.py @@ -1,10 +1,14 @@ from typing import Final -from pydantic import ByteSize, parse_obj_as +from pydantic import ByteSize, TypeAdapter # NOTE: AWS S3 upload limits https://docs.aws.amazon.com/AmazonS3/latest/userguide/qfacts.html -MULTIPART_UPLOADS_MIN_TOTAL_SIZE: Final[ByteSize] = parse_obj_as(ByteSize, "100MiB") -MULTIPART_COPY_THRESHOLD: Final[ByteSize] = parse_obj_as(ByteSize, "100MiB") +MULTIPART_UPLOADS_MIN_TOTAL_SIZE: Final[ByteSize] = TypeAdapter( + ByteSize +).validate_python("100MiB") +MULTIPART_COPY_THRESHOLD: Final[ByteSize] = TypeAdapter(ByteSize).validate_python( + "100MiB" +) -PRESIGNED_LINK_MAX_SIZE: Final[ByteSize] = parse_obj_as(ByteSize, "5GiB") -S3_MAX_FILE_SIZE: Final[ByteSize] = parse_obj_as(ByteSize, "5TiB") +PRESIGNED_LINK_MAX_SIZE: Final[ByteSize] = TypeAdapter(ByteSize).validate_python("5GiB") +S3_MAX_FILE_SIZE: Final[ByteSize] = TypeAdapter(ByteSize).validate_python("5TiB") diff --git a/packages/aws-library/src/aws_library/s3/_utils.py b/packages/aws-library/src/aws_library/s3/_utils.py index 00a1bcc59bb..96ad59f57d3 100644 --- a/packages/aws-library/src/aws_library/s3/_utils.py +++ b/packages/aws-library/src/aws_library/s3/_utils.py @@ -1,13 +1,13 @@ from typing import Final -from pydantic import ByteSize, parse_obj_as +from pydantic import ByteSize, TypeAdapter _MULTIPART_MAX_NUMBER_OF_PARTS: Final[int] = 10000 # this is artifically defined, if possible we keep a maximum number of requests for parallel # uploading. 
If that is not possible then we create as many upload part as the max part size allows _MULTIPART_UPLOADS_TARGET_MAX_PART_SIZE: Final[list[ByteSize]] = [ - parse_obj_as(ByteSize, x) + TypeAdapter(ByteSize).validate_python(x) for x in [ "10Mib", "50Mib", diff --git a/packages/aws-library/tests/test_ec2_models.py b/packages/aws-library/tests/test_ec2_models.py index cfbcaa17c4c..5d39b4cb746 100644 --- a/packages/aws-library/tests/test_ec2_models.py +++ b/packages/aws-library/tests/test_ec2_models.py @@ -6,7 +6,7 @@ import pytest from aws_library.ec2._models import AWSTagKey, AWSTagValue, EC2InstanceData, Resources from faker import Faker -from pydantic import ByteSize, ValidationError, parse_obj_as +from pydantic import ByteSize, TypeAdapter, ValidationError @pytest.mark.parametrize( @@ -129,10 +129,10 @@ def test_resources_sub(a: Resources, b: Resources, result: Resources): def test_aws_tag_key_invalid(ec2_tag_key: str): # for a key it raises with pytest.raises(ValidationError): - parse_obj_as(AWSTagKey, ec2_tag_key) + TypeAdapter(AWSTagKey).validate_python(ec2_tag_key) # for a value it does not - parse_obj_as(AWSTagValue, ec2_tag_key) + TypeAdapter(AWSTagValue).validate_python(ec2_tag_key) def test_ec2_instance_data_hashable(faker: Faker): diff --git a/packages/aws-library/tests/test_s3_client.py b/packages/aws-library/tests/test_s3_client.py index 93ee29fe5b0..b44d888923d 100644 --- a/packages/aws-library/tests/test_s3_client.py +++ b/packages/aws-library/tests/test_s3_client.py @@ -32,7 +32,7 @@ from models_library.api_schemas_storage import S3BucketName, UploadedPart from models_library.basic_types import SHA256Str from moto.server import ThreadedMotoServer -from pydantic import AnyUrl, ByteSize, parse_obj_as +from pydantic import AnyUrl, ByteSize, TypeAdapter from pytest_benchmark.plugin import BenchmarkFixture from pytest_simcore.helpers.logging_tools import log_context from pytest_simcore.helpers.parametrizations import ( @@ -67,7 +67,9 @@ async def simcore_s3_api( @pytest.fixture def bucket_name(faker: Faker) -> S3BucketName: # NOTE: no faker here as we need some specific namings - return parse_obj_as(S3BucketName, faker.pystr().replace("_", "-").lower()) + return TypeAdapter(S3BucketName).validate_python( + faker.pystr().replace("_", "-").lower() + ) @pytest.fixture @@ -89,7 +91,9 @@ async def with_s3_bucket( @pytest.fixture def non_existing_s3_bucket(faker: Faker) -> S3BucketName: - return parse_obj_as(S3BucketName, faker.pystr().replace("_", "-").lower()) + return TypeAdapter(S3BucketName).validate_python( + faker.pystr().replace("_", "-").lower() + ) @pytest.fixture @@ -107,7 +111,7 @@ async def _( file, MultiPartUploadLinks( upload_id="fake", - chunk_size=parse_obj_as(ByteSize, file.stat().st_size), + chunk_size=TypeAdapter(ByteSize).validate_python(file.stat().st_size), urls=[presigned_url], ), ) @@ -131,7 +135,7 @@ async def with_uploaded_file_on_s3( s3_client: S3Client, with_s3_bucket: S3BucketName, ) -> AsyncIterator[UploadedFile]: - test_file = create_file_of_size(parse_obj_as(ByteSize, "10Kib")) + test_file = create_file_of_size(TypeAdapter(ByteSize).validate_python("10Kib")) await s3_client.upload_file( Filename=f"{test_file}", Bucket=with_s3_bucket, @@ -200,7 +204,7 @@ async def _uploader( object_key=object_key, file_size=ByteSize(file.stat().st_size), expiration_secs=default_expiration_time_seconds, - sha256_checksum=parse_obj_as(SHA256Str, faker.sha256()), + sha256_checksum=TypeAdapter(SHA256Str).validate_python(faker.sha256()), ) assert upload_links @@ -586,7 +590,7 
@@ async def test_undelete_file( assert file_metadata.size == with_uploaded_file_on_s3.local_path.stat().st_size # upload another file on top of the existing one - new_file = create_file_of_size(parse_obj_as(ByteSize, "5Kib")) + new_file = create_file_of_size(TypeAdapter(ByteSize).validate_python("5Kib")) await s3_client.upload_file( Filename=f"{new_file}", Bucket=with_s3_bucket, @@ -741,7 +745,7 @@ async def test_create_single_presigned_upload_link( [Path, AnyUrl, S3BucketName, S3ObjectKey], Awaitable[None] ], ): - file = create_file_of_size(parse_obj_as(ByteSize, "1Mib")) + file = create_file_of_size(TypeAdapter(ByteSize).validate_python("1Mib")) s3_object_key = file.name presigned_url = await simcore_s3_api.create_single_presigned_upload_link( bucket=with_s3_bucket, @@ -769,7 +773,7 @@ async def test_create_single_presigned_upload_link_with_non_existing_bucket_rais create_file_of_size: Callable[[ByteSize], Path], default_expiration_time_seconds: int, ): - file = create_file_of_size(parse_obj_as(ByteSize, "1Mib")) + file = create_file_of_size(TypeAdapter(ByteSize).validate_python("1Mib")) s3_object_key = file.name with pytest.raises(S3BucketInvalidError): await simcore_s3_api.create_single_presigned_upload_link( @@ -863,7 +867,7 @@ async def test_create_multipart_presigned_upload_link_invalid_raises( object_key=faker.pystr(), file_size=ByteSize(file.stat().st_size), expiration_secs=default_expiration_time_seconds, - sha256_checksum=parse_obj_as(SHA256Str, faker.sha256()), + sha256_checksum=TypeAdapter(SHA256Str).validate_python(faker.sha256()), ) # completing with invalid bucket @@ -1076,7 +1080,7 @@ async def test_copy_file_invalid_raises( create_file_of_size: Callable[[ByteSize], Path], faker: Faker, ): - file = create_file_of_size(parse_obj_as(ByteSize, "1MiB")) + file = create_file_of_size(TypeAdapter(ByteSize).validate_python("1MiB")) uploaded_file = await upload_file(file) dst_object_key = faker.file_name() # NOTE: since aioboto3 13.1.0 this raises S3KeyNotFoundError instead of S3BucketInvalidError @@ -1101,9 +1105,9 @@ async def test_copy_file_invalid_raises( "directory_size, min_file_size, max_file_size", [ ( - parse_obj_as(ByteSize, "1Mib"), - parse_obj_as(ByteSize, "1B"), - parse_obj_as(ByteSize, "10Kib"), + TypeAdapter(ByteSize).validate_python("1Mib"), + TypeAdapter(ByteSize).validate_python("1B"), + TypeAdapter(ByteSize).validate_python("10Kib"), ) ], ids=byte_size_ids, @@ -1127,9 +1131,9 @@ async def test_get_directory_metadata( "directory_size, min_file_size, max_file_size", [ ( - parse_obj_as(ByteSize, "1Mib"), - parse_obj_as(ByteSize, "1B"), - parse_obj_as(ByteSize, "10Kib"), + TypeAdapter(ByteSize).validate_python("1Mib"), + TypeAdapter(ByteSize).validate_python("1B"), + TypeAdapter(ByteSize).validate_python("10Kib"), ) ], ids=byte_size_ids, @@ -1159,9 +1163,9 @@ async def test_get_directory_metadata_raises( "directory_size, min_file_size, max_file_size", [ ( - parse_obj_as(ByteSize, "1Mib"), - parse_obj_as(ByteSize, "1B"), - parse_obj_as(ByteSize, "10Kib"), + TypeAdapter(ByteSize).validate_python("1Mib"), + TypeAdapter(ByteSize).validate_python("1B"), + TypeAdapter(ByteSize).validate_python("10Kib"), ) ], ids=byte_size_ids, @@ -1195,9 +1199,9 @@ async def test_delete_file_recursively( "directory_size, min_file_size, max_file_size", [ ( - parse_obj_as(ByteSize, "1Mib"), - parse_obj_as(ByteSize, "1B"), - parse_obj_as(ByteSize, "10Kib"), + TypeAdapter(ByteSize).validate_python("1Mib"), + TypeAdapter(ByteSize).validate_python("1B"), + 
TypeAdapter(ByteSize).validate_python("10Kib"), ) ], ids=byte_size_ids, @@ -1233,9 +1237,9 @@ async def test_delete_file_recursively_raises( "directory_size, min_file_size, max_file_size", [ ( - parse_obj_as(ByteSize, "1Mib"), - parse_obj_as(ByteSize, "1B"), - parse_obj_as(ByteSize, "10Kib"), + TypeAdapter(ByteSize).validate_python("1Mib"), + TypeAdapter(ByteSize).validate_python("1B"), + TypeAdapter(ByteSize).validate_python("10Kib"), ) ], ids=byte_size_ids, @@ -1333,14 +1337,14 @@ def run_async_test(*args, **kwargs) -> None: "directory_size, min_file_size, max_file_size", [ ( - parse_obj_as(ByteSize, "1Mib"), - parse_obj_as(ByteSize, "1B"), - parse_obj_as(ByteSize, "10Kib"), + TypeAdapter(ByteSize).validate_python("1Mib"), + TypeAdapter(ByteSize).validate_python("1B"), + TypeAdapter(ByteSize).validate_python("10Kib"), ), ( - parse_obj_as(ByteSize, "500Mib"), - parse_obj_as(ByteSize, "10Mib"), - parse_obj_as(ByteSize, "50Mib"), + TypeAdapter(ByteSize).validate_python("500Mib"), + TypeAdapter(ByteSize).validate_python("10Mib"), + TypeAdapter(ByteSize).validate_python("50Mib"), ), ], ids=byte_size_ids, diff --git a/packages/aws-library/tests/test_s3_utils.py b/packages/aws-library/tests/test_s3_utils.py index 5354da8bc66..cfba1634943 100644 --- a/packages/aws-library/tests/test_s3_utils.py +++ b/packages/aws-library/tests/test_s3_utils.py @@ -10,23 +10,63 @@ _MULTIPART_UPLOADS_TARGET_MAX_PART_SIZE, compute_num_file_chunks, ) -from pydantic import ByteSize, parse_obj_as +from pydantic import ByteSize, TypeAdapter from pytest_simcore.helpers.parametrizations import byte_size_ids @pytest.mark.parametrize( "file_size, expected_num_chunks, expected_chunk_size", [ - (parse_obj_as(ByteSize, "5Mib"), 1, parse_obj_as(ByteSize, "10Mib")), - (parse_obj_as(ByteSize, "10Mib"), 1, parse_obj_as(ByteSize, "10Mib")), - (parse_obj_as(ByteSize, "20Mib"), 2, parse_obj_as(ByteSize, "10Mib")), - (parse_obj_as(ByteSize, "50Mib"), 5, parse_obj_as(ByteSize, "10Mib")), - (parse_obj_as(ByteSize, "150Mib"), 15, parse_obj_as(ByteSize, "10Mib")), - (parse_obj_as(ByteSize, "550Mib"), 55, parse_obj_as(ByteSize, "10Mib")), - (parse_obj_as(ByteSize, "560Gib"), 5735, parse_obj_as(ByteSize, "100Mib")), - (parse_obj_as(ByteSize, "5Tib"), 8739, parse_obj_as(ByteSize, "600Mib")), - (parse_obj_as(ByteSize, "15Tib"), 7680, parse_obj_as(ByteSize, "2Gib")), - (parse_obj_as(ByteSize, "9431773844"), 900, parse_obj_as(ByteSize, "10Mib")), + ( + TypeAdapter(ByteSize).validate_python("5Mib"), + 1, + TypeAdapter(ByteSize).validate_python("10Mib"), + ), + ( + TypeAdapter(ByteSize).validate_python("10Mib"), + 1, + TypeAdapter(ByteSize).validate_python("10Mib"), + ), + ( + TypeAdapter(ByteSize).validate_python("20Mib"), + 2, + TypeAdapter(ByteSize).validate_python("10Mib"), + ), + ( + TypeAdapter(ByteSize).validate_python("50Mib"), + 5, + TypeAdapter(ByteSize).validate_python("10Mib"), + ), + ( + TypeAdapter(ByteSize).validate_python("150Mib"), + 15, + TypeAdapter(ByteSize).validate_python("10Mib"), + ), + ( + TypeAdapter(ByteSize).validate_python("550Mib"), + 55, + TypeAdapter(ByteSize).validate_python("10Mib"), + ), + ( + TypeAdapter(ByteSize).validate_python("560Gib"), + 5735, + TypeAdapter(ByteSize).validate_python("100Mib"), + ), + ( + TypeAdapter(ByteSize).validate_python("5Tib"), + 8739, + TypeAdapter(ByteSize).validate_python("600Mib"), + ), + ( + TypeAdapter(ByteSize).validate_python("15Tib"), + 7680, + TypeAdapter(ByteSize).validate_python("2Gib"), + ), + ( + TypeAdapter(ByteSize).validate_python("9431773844"), + 900, + 
TypeAdapter(ByteSize).validate_python("10Mib"), + ), ], ids=byte_size_ids, ) @@ -39,8 +79,7 @@ def test_compute_num_file_chunks( def test_enormous_file_size_raises_value_error(): - enormous_file_size = parse_obj_as( - ByteSize, + enormous_file_size = TypeAdapter(ByteSize).validate_python( ( max(_MULTIPART_UPLOADS_TARGET_MAX_PART_SIZE) * _MULTIPART_MAX_NUMBER_OF_PARTS From f2f007e9cc3360bd7bc9ba9f9d4548d7f229dcc5 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 19 Sep 2024 20:06:00 +0200 Subject: [PATCH 026/280] fix regex validation with look-ahead assertion In Pydantic v2, the pydantic-core library no longer supports advanced regex features such as look-ahead assertions (like ^(?!...)). --- packages/aws-library/src/aws_library/ec2/_models.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/packages/aws-library/src/aws_library/ec2/_models.py b/packages/aws-library/src/aws_library/ec2/_models.py index a43d36bc014..a1c79bdf179 100644 --- a/packages/aws-library/src/aws_library/ec2/_models.py +++ b/packages/aws-library/src/aws_library/ec2/_models.py @@ -6,6 +6,7 @@ import sh # type: ignore[import-untyped] from models_library.docker import DockerGenericTag from pydantic import ( + AfterValidator, BaseModel, ByteSize, ConfigDict, @@ -73,8 +74,9 @@ class EC2InstanceType: StringConstraints( min_length=1, max_length=128, - pattern=r"^(?!(_index|\.{1,2})$)[a-zA-Z0-9\+\-=\._:@]+$", + pattern=r"^[a-zA-Z0-9\+\-=\._:@]+$", ), + AfterValidator(lambda v: v not in {"_index", ".", ".."} or ValueError("Field cannot be '_index', '.', or '..'.")) ] # see [https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/Using_Tags.html#tag-restrictions] From 76f1d5edc255d6e8e5ae6018df261d5da9e066ff Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 19 Sep 2024 20:33:06 +0200 Subject: [PATCH 027/280] fix validate tag_key --- packages/aws-library/src/aws_library/ec2/_models.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/packages/aws-library/src/aws_library/ec2/_models.py b/packages/aws-library/src/aws_library/ec2/_models.py index a1c79bdf179..b18a7e8c38e 100644 --- a/packages/aws-library/src/aws_library/ec2/_models.py +++ b/packages/aws-library/src/aws_library/ec2/_models.py @@ -67,6 +67,10 @@ class EC2InstanceType: InstancePrivateDNSName: TypeAlias = str +def _validate_tag_key(value: str): + if value in {"_index", ".", ".."}: + raise ValueError("Field cannot be '_index', '.', or '..'.") + return value # see [https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/Using_Tags.html#tag-restrictions] AWSTagKey: TypeAlias = Annotated[ @@ -76,7 +80,7 @@ class EC2InstanceType: max_length=128, pattern=r"^[a-zA-Z0-9\+\-=\._:@]+$", ), - AfterValidator(lambda v: v not in {"_index", ".", ".."} or ValueError("Field cannot be '_index', '.', or '..'.")) + AfterValidator(_validate_tag_key) ] # see [https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/Using_Tags.html#tag-restrictions] From 2112122ca4454ce8ba709d9cc4513ff41151d68c Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 19 Sep 2024 20:49:34 +0200 Subject: [PATCH 028/280] remove deprecated --- packages/pytest-simcore/src/pytest_simcore/aws_server.py | 6 +++--- .../src/pytest_simcore/helpers/parametrizations.py | 4 ++-- packages/pytest-simcore/src/pytest_simcore/helpers/s3.py | 4 ++-- packages/service-library/src/servicelib/utils_secrets.py | 4 ++-- 4 files changed, 9 insertions(+), 9 deletions(-) diff --git a/packages/pytest-simcore/src/pytest_simcore/aws_server.py 
b/packages/pytest-simcore/src/pytest_simcore/aws_server.py index 077fb25d51a..78a03a19d3f 100644 --- a/packages/pytest-simcore/src/pytest_simcore/aws_server.py +++ b/packages/pytest-simcore/src/pytest_simcore/aws_server.py @@ -3,6 +3,7 @@ # pylint: disable=unused-import from collections.abc import Iterator +from typing import Annotated from unittest import mock import pytest @@ -11,7 +12,7 @@ from faker import Faker from models_library.utils.fastapi_encoders import jsonable_encoder from moto.server import ThreadedMotoServer -from pydantic import AnyHttpUrl, SecretStr, parse_obj_as +from pydantic import AnyHttpUrl, SecretStr, TypeAdapter, parse_obj_as from pytest_mock.plugin import MockerFixture from settings_library.basic_types import IDStr from settings_library.ec2 import EC2Settings @@ -124,8 +125,7 @@ def mocked_s3_server_settings( ) -> S3Settings: return S3Settings( S3_ACCESS_KEY=IDStr("xxx"), - S3_ENDPOINT=parse_obj_as( - AnyHttpUrl, + S3_ENDPOINT=TypeAdapter(Annotated[str, AnyHttpUrl]).validate_python( f"http://{mocked_aws_server._ip_address}:{mocked_aws_server._port}", # pylint: disable=protected-access # noqa: SLF001 ), S3_SECRET_KEY=IDStr("xxx"), diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/parametrizations.py b/packages/pytest-simcore/src/pytest_simcore/helpers/parametrizations.py index ed6381f5611..6eae044643b 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/parametrizations.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/parametrizations.py @@ -1,6 +1,6 @@ import pytest from _pytest.mark.structures import ParameterSet -from pydantic import ByteSize, parse_obj_as +from pydantic import ByteSize, TypeAdapter def byte_size_ids(val) -> str | None: @@ -10,4 +10,4 @@ def byte_size_ids(val) -> str | None: def parametrized_file_size(size_str: str) -> ParameterSet: - return pytest.param(parse_obj_as(ByteSize, size_str), id=size_str) + return pytest.param(TypeAdapter(ByteSize).validate_python(size_str), id=size_str) diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/s3.py b/packages/pytest-simcore/src/pytest_simcore/helpers/s3.py index 5d7e721a832..b6e1a0de08f 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/s3.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/s3.py @@ -8,14 +8,14 @@ from aiohttp import ClientSession from aws_library.s3 import MultiPartUploadLinks from models_library.api_schemas_storage import ETag, FileUploadSchema, UploadedPart -from pydantic import AnyUrl, ByteSize, parse_obj_as +from pydantic import AnyUrl, ByteSize, TypeAdapter from servicelib.aiohttp import status from servicelib.utils import limited_as_completed, logged_gather from types_aiobotocore_s3 import S3Client from .logging_tools import log_context -_SENDER_CHUNK_SIZE: Final[int] = parse_obj_as(ByteSize, "16Mib") +_SENDER_CHUNK_SIZE: Final[int] = TypeAdapter(ByteSize).validate_python("16Mib") async def _file_sender( diff --git a/packages/service-library/src/servicelib/utils_secrets.py b/packages/service-library/src/servicelib/utils_secrets.py index 7b74a491080..67c440ce044 100644 --- a/packages/service-library/src/servicelib/utils_secrets.py +++ b/packages/service-library/src/servicelib/utils_secrets.py @@ -2,7 +2,7 @@ import string from typing import Final -from pydantic import StrictInt, validate_arguments +from pydantic import StrictInt, validate_call MIN_PASSWORD_LENGTH = 30 _SAFE_SYMBOLS = "!$%*+,-.:=?@^_~" # avoid issues with parsing, espapes etc @@ -48,7 +48,7 @@ def are_secrets_equal(got: str, expected: str) -> bool: 
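This utils_secrets change swaps Pydantic v1's deprecated `@validate_arguments` for its v2 replacement `@validate_call` (the import swap is above, the decorator swap follows in the next hunk). A minimal, self-contained sketch of what the new decorator does — the `_demo` function name is illustrative only, not code from this repository:

    from pydantic import StrictInt, ValidationError, validate_call

    @validate_call
    def secure_randint_demo(start: StrictInt, end: StrictInt) -> int:
        # arguments are validated against the annotations before the body runs
        return (start + end) // 2

    assert secure_randint_demo(1, 11) == 6
    try:
        secure_randint_demo("1", 11)  # StrictInt rejects the string
    except ValidationError:
        pass
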
return secrets.compare_digest(got.encode("utf8"), expected.encode("utf8")) -@validate_arguments +@validate_call def secure_randint(start: StrictInt, end: StrictInt) -> int: """Generate a random integer between start (inclusive) and end (exclusive).""" if start >= end: From 7404f3e75d9f314b048a3eed74e97dba34a01228 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 19 Sep 2024 22:42:46 +0200 Subject: [PATCH 029/280] continue upgrading --- .../src/models_library/api_schemas_storage.py | 13 +++++++++---- .../src/models_library/basic_regex.py | 6 ++++++ .../pytest-simcore/src/pytest_simcore/aws_server.py | 3 +-- .../settings-library/src/settings_library/ec2.py | 5 +++-- .../settings-library/src/settings_library/s3.py | 9 ++++++--- 5 files changed, 25 insertions(+), 11 deletions(-) diff --git a/packages/models-library/src/models_library/api_schemas_storage.py b/packages/models-library/src/models_library/api_schemas_storage.py index fac13d42d40..7a18334dae4 100644 --- a/packages/models-library/src/models_library/api_schemas_storage.py +++ b/packages/models-library/src/models_library/api_schemas_storage.py @@ -8,10 +8,12 @@ from datetime import datetime from enum import Enum +from functools import partial from typing import Annotated, Any, TypeAlias from uuid import UUID from pydantic import ( + AfterValidator, BaseModel, ByteSize, ConfigDict, @@ -24,7 +26,7 @@ ) from pydantic.networks import AnyUrl -from .basic_regex import DATCORE_DATASET_NAME_RE, S3_BUCKET_NAME_RE +from .basic_regex import DATCORE_DATASET_NAME_RE, S3_BUCKET_NAME_RE, validate_re from .basic_types import SHA256Str from .generics import ListModel from .projects_nodes_io import ( @@ -37,10 +39,13 @@ ETag: TypeAlias = str +S3BucketName: TypeAlias = Annotated[ + str, AfterValidator(partial(validate_re, S3_BUCKET_NAME_RE)) +] -S3BucketName: TypeAlias = Annotated[str, StringConstraints(pattern=S3_BUCKET_NAME_RE)] - -DatCoreDatasetName: TypeAlias = Annotated[str, StringConstraints(pattern=DATCORE_DATASET_NAME_RE)] +DatCoreDatasetName: TypeAlias = Annotated[ + str, StringConstraints(pattern=DATCORE_DATASET_NAME_RE) +] # / diff --git a/packages/models-library/src/models_library/basic_regex.py b/packages/models-library/src/models_library/basic_regex.py index 51441fe39e6..269d4b1987a 100644 --- a/packages/models-library/src/models_library/basic_regex.py +++ b/packages/models-library/src/models_library/basic_regex.py @@ -77,3 +77,9 @@ ) PROPERTY_KEY_RE = r"^[-_a-zA-Z0-9]+$" # TODO: PC->* it would be advisable to have this "variable friendly" (see VARIABLE_NAME_RE) + + +def validate_re(pattern: str, value: str): + if not re.compile(pattern).match(value): + raise ValueError(f"The {value} doesn't match the {pattern=}.") + return value diff --git a/packages/pytest-simcore/src/pytest_simcore/aws_server.py b/packages/pytest-simcore/src/pytest_simcore/aws_server.py index 78a03a19d3f..12c3224e915 100644 --- a/packages/pytest-simcore/src/pytest_simcore/aws_server.py +++ b/packages/pytest-simcore/src/pytest_simcore/aws_server.py @@ -3,7 +3,6 @@ # pylint: disable=unused-import from collections.abc import Iterator -from typing import Annotated from unittest import mock import pytest @@ -125,7 +124,7 @@ def mocked_s3_server_settings( ) -> S3Settings: return S3Settings( S3_ACCESS_KEY=IDStr("xxx"), - S3_ENDPOINT=TypeAdapter(Annotated[str, AnyHttpUrl]).validate_python( + S3_ENDPOINT=TypeAdapter(AnyHttpUrl).validate_python( f"http://{mocked_aws_server._ip_address}:{mocked_aws_server._port}", # pylint: disable=protected-access # noqa: SLF001 ), 
S3_SECRET_KEY=IDStr("xxx"), diff --git a/packages/settings-library/src/settings_library/ec2.py b/packages/settings-library/src/settings_library/ec2.py index a28fd9335c8..08876fd6083 100644 --- a/packages/settings-library/src/settings_library/ec2.py +++ b/packages/settings-library/src/settings_library/ec2.py @@ -1,4 +1,5 @@ -from pydantic import ConfigDict, Field +from pydantic import Field +from pydantic_settings import SettingsConfigDict from .base import BaseCustomSettings @@ -11,7 +12,7 @@ class EC2Settings(BaseCustomSettings): EC2_REGION_NAME: str = "us-east-1" EC2_SECRET_ACCESS_KEY: str - model_config = ConfigDict( + model_config = SettingsConfigDict( json_schema_extra={ "examples": [ { diff --git a/packages/settings-library/src/settings_library/s3.py b/packages/settings-library/src/settings_library/s3.py index 5e971283d46..269ba56b731 100644 --- a/packages/settings-library/src/settings_library/s3.py +++ b/packages/settings-library/src/settings_library/s3.py @@ -1,4 +1,7 @@ -from pydantic import AnyHttpUrl, ConfigDict, Field +from typing import Annotated + +from pydantic import AfterValidator, AnyHttpUrl, Field +from pydantic_settings import SettingsConfigDict from .base import BaseCustomSettings from .basic_types import IDStr @@ -7,13 +10,13 @@ class S3Settings(BaseCustomSettings): S3_ACCESS_KEY: IDStr S3_BUCKET_NAME: IDStr - S3_ENDPOINT: AnyHttpUrl | None = Field( + S3_ENDPOINT: Annotated[AnyHttpUrl, AfterValidator(str)] | None = Field( default=None, description="do not define if using standard AWS" ) S3_REGION: IDStr S3_SECRET_KEY: IDStr - model_config = ConfigDict( + model_config = SettingsConfigDict( json_schema_extra={ "examples": [ { From 7e156d72a4e0f34481041e7edebef504fc2e4f15 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 20 Sep 2024 09:16:56 +0200 Subject: [PATCH 030/280] fix regex --- packages/aws-library/src/aws_library/ec2/_models.py | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/packages/aws-library/src/aws_library/ec2/_models.py b/packages/aws-library/src/aws_library/ec2/_models.py index b18a7e8c38e..ed260ad6ff0 100644 --- a/packages/aws-library/src/aws_library/ec2/_models.py +++ b/packages/aws-library/src/aws_library/ec2/_models.py @@ -1,4 +1,5 @@ import datetime +import re import tempfile from dataclasses import dataclass from typing import Annotated, TypeAlias @@ -6,7 +7,6 @@ import sh # type: ignore[import-untyped] from models_library.docker import DockerGenericTag from pydantic import ( - AfterValidator, BaseModel, ByteSize, ConfigDict, @@ -67,10 +67,6 @@ class EC2InstanceType: InstancePrivateDNSName: TypeAlias = str -def _validate_tag_key(value: str): - if value in {"_index", ".", ".."}: - raise ValueError("Field cannot be '_index', '.', or '..'.") - return value # see [https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/Using_Tags.html#tag-restrictions] AWSTagKey: TypeAlias = Annotated[ @@ -78,9 +74,8 @@ def _validate_tag_key(value: str): StringConstraints( min_length=1, max_length=128, - pattern=r"^[a-zA-Z0-9\+\-=\._:@]+$", + pattern=re.compile(r"^(?!(_index|\.{1,2})$)[a-zA-Z0-9\+\-=\._:@]+$"), ), - AfterValidator(_validate_tag_key) ] # see [https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/Using_Tags.html#tag-restrictions] From 1401f2e6dfa4a4d6d12bab788d28f105ffc085c3 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 20 Sep 2024 09:41:29 +0200 Subject: [PATCH 031/280] fix error base class --- packages/aws-library/src/aws_library/s3/_errors.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git 
a/packages/aws-library/src/aws_library/s3/_errors.py b/packages/aws-library/src/aws_library/s3/_errors.py index f297b04b64d..9921e6c20dc 100644 --- a/packages/aws-library/src/aws_library/s3/_errors.py +++ b/packages/aws-library/src/aws_library/s3/_errors.py @@ -1,7 +1,7 @@ -from pydantic.errors import PydanticErrorMixin +from models_library.errors_classes import OsparcErrorMixin -class S3RuntimeError(PydanticErrorMixin, RuntimeError): +class S3RuntimeError(OsparcErrorMixin, RuntimeError): msg_template: str = "S3 client unexpected error" From 5f553ced7d6c6245df754fd1ea4647553cc93d82 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 20 Sep 2024 10:29:16 +0200 Subject: [PATCH 032/280] continue upgrading --- packages/aws-library/tests/test_s3_client.py | 4 ++-- packages/pytest-simcore/src/pytest_simcore/helpers/s3.py | 2 +- packages/settings-library/src/settings_library/ssm.py | 9 ++++++--- 3 files changed, 9 insertions(+), 6 deletions(-) diff --git a/packages/aws-library/tests/test_s3_client.py b/packages/aws-library/tests/test_s3_client.py index b44d888923d..a7ef20d698c 100644 --- a/packages/aws-library/tests/test_s3_client.py +++ b/packages/aws-library/tests/test_s3_client.py @@ -696,7 +696,7 @@ async def test_create_single_presigned_download_link( dest_file = tmp_path / faker.file_name() async with ClientSession() as session: - response = await session.get(download_url) + response = await session.get(str(download_url)) response.raise_for_status() with dest_file.open("wb") as fp: fp.write(await response.read()) @@ -1302,7 +1302,7 @@ def test_compute_s3_url( bucket: S3BucketName, object_key: S3ObjectKey, expected_s3_url: AnyUrl ): assert ( - SimcoreS3API.compute_s3_url(bucket=bucket, object_key=object_key) + str(SimcoreS3API.compute_s3_url(bucket=bucket, object_key=object_key)) == expected_s3_url ) diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/s3.py b/packages/pytest-simcore/src/pytest_simcore/helpers/s3.py index b6e1a0de08f..2f0a03b575d 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/s3.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/s3.py @@ -51,7 +51,7 @@ async def upload_file_part( f"--> uploading {this_file_chunk_size=} of {file=}, [{part_index+1}/{num_parts}]..." 
) response = await session.put( - upload_url, + str(upload_url), data=_file_sender( file, offset=file_offset, diff --git a/packages/settings-library/src/settings_library/ssm.py b/packages/settings-library/src/settings_library/ssm.py index 05c5200a0b5..0fb0ec86da6 100644 --- a/packages/settings-library/src/settings_library/ssm.py +++ b/packages/settings-library/src/settings_library/ssm.py @@ -1,17 +1,20 @@ -from pydantic import AnyHttpUrl, ConfigDict, Field, SecretStr +from typing import Annotated + +from pydantic import AfterValidator, AnyHttpUrl, Field, SecretStr +from pydantic_settings import SettingsConfigDict from .base import BaseCustomSettings class SSMSettings(BaseCustomSettings): SSM_ACCESS_KEY_ID: SecretStr - SSM_ENDPOINT: AnyHttpUrl | None = Field( + SSM_ENDPOINT: Annotated[AnyHttpUrl, AfterValidator(str)] | None = Field( default=None, description="do not define if using standard AWS" ) SSM_REGION_NAME: str = "us-east-1" SSM_SECRET_ACCESS_KEY: SecretStr - model_config = ConfigDict( + model_config = SettingsConfigDict( json_schema_extra={ "examples": [ { From 6ed05516a5a88e88784d93f4b0473fb82113c574 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 20 Sep 2024 10:32:39 +0200 Subject: [PATCH 033/280] upgrade dependencies --- packages/aws-library/requirements/_base.txt | 14 ++++++++++++++ packages/aws-library/requirements/_test.txt | 4 +++- 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/packages/aws-library/requirements/_base.txt b/packages/aws-library/requirements/_base.txt index 69125f953cf..53832fe75c5 100644 --- a/packages/aws-library/requirements/_base.txt +++ b/packages/aws-library/requirements/_base.txt @@ -122,8 +122,20 @@ pydantic==2.9.1 # -r requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/_base.in # fast-depends + # pydantic-extra-types + # pydantic-settings pydantic-core==2.23.3 # via pydantic +pydantic-extra-types==2.9.0 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in +pydantic-settings==2.5.2 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in pygments==2.18.0 # via rich pyinstrument==4.7.2 @@ -132,6 +144,8 @@ python-dateutil==2.9.0.post0 # via # arrow # botocore +python-dotenv==1.0.1 + # via pydantic-settings pyyaml==6.0.2 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt diff --git a/packages/aws-library/requirements/_test.txt b/packages/aws-library/requirements/_test.txt index ace30f36117..1a9b4a7afa2 100644 --- a/packages/aws-library/requirements/_test.txt +++ b/packages/aws-library/requirements/_test.txt @@ -201,7 +201,9 @@ python-dateutil==2.9.0.post0 # faker # moto python-dotenv==1.0.1 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in pyyaml==6.0.2 # via # -c requirements/../../../requirements/constraints.txt From c36b542820d759bafdf8f6f728b038f778ceff52 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 20 Sep 2024 10:35:24 +0200 Subject: [PATCH 034/280] fix error base class --- 
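As with the earlier S3 errors commit, this change is needed because `pydantic.errors.PydanticErrorMixin` was dropped in Pydantic v2, so the exception hierarchies now derive from the in-repo `OsparcErrorMixin`. A rough, standalone sketch of the pattern such a mixin provides — constructor kwargs interpolated into `msg_template` — assuming behaviour close to the hand-rolled variant that briefly appears later in this series; the real `models_library.errors_classes` implementation may differ, and the `*Sketch` names below are illustrative only:

    class ErrorMixinSketch:
        msg_template: str = "unexpected error"

        def __init__(self, **ctx) -> None:
            # keep the context and render it into the template for the base exception
            self.ctx = ctx
            super().__init__(self.msg_template.format(**ctx))


    class SSMRuntimeErrorSketch(ErrorMixinSketch, RuntimeError):
        msg_template = "SSM client unexpected error"


    class SSMCommandFailedSketch(SSMRuntimeErrorSketch):
        msg_template = "Command {command_id} failed on {instance_id}"


    err = SSMCommandFailedSketch(command_id="cmd-1", instance_id="i-0abc")
    assert str(err) == "Command cmd-1 failed on i-0abc"
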
packages/aws-library/src/aws_library/ssm/_errors.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/aws-library/src/aws_library/ssm/_errors.py b/packages/aws-library/src/aws_library/ssm/_errors.py index 32300d08d29..73f050d8d2c 100644 --- a/packages/aws-library/src/aws_library/ssm/_errors.py +++ b/packages/aws-library/src/aws_library/ssm/_errors.py @@ -1,7 +1,7 @@ -from pydantic.errors import PydanticErrorMixin +from models_library.errors_classes import OsparcErrorMixin -class SSMRuntimeError(PydanticErrorMixin, RuntimeError): +class SSMRuntimeError(OsparcErrorMixin, RuntimeError): msg_template: str = "SSM client unexpected error" From 49a0ab4ee1a6ef1e891f0f11989af2d1555204b9 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 20 Sep 2024 10:38:51 +0200 Subject: [PATCH 035/280] upgrade dependencies --- .../dask-task-models-library/requirements/_base.txt | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/packages/dask-task-models-library/requirements/_base.txt b/packages/dask-task-models-library/requirements/_base.txt index aea8fcde117..81807f5ffe6 100644 --- a/packages/dask-task-models-library/requirements/_base.txt +++ b/packages/dask-task-models-library/requirements/_base.txt @@ -75,12 +75,22 @@ pydantic==2.9.1 # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/_base.in + # pydantic-extra-types + # pydantic-settings pydantic-core==2.23.3 # via pydantic +pydantic-extra-types==2.9.0 + # via -r requirements/../../../packages/models-library/requirements/_base.in +pydantic-settings==2.5.2 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in pygments==2.18.0 # via rich python-dateutil==2.9.0.post0 # via arrow +python-dotenv==1.0.1 + # via pydantic-settings pyyaml==6.0.2 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt From 0191717eaddaaaff5579f3a71802324961e1651f Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 20 Sep 2024 10:45:57 +0200 Subject: [PATCH 036/280] continue upgrading --- .../dask_task_models_library/container_tasks/errors.py | 8 ++++---- .../dask_task_models_library/container_tasks/protocol.py | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/errors.py b/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/errors.py index f4060531f7f..6667455d9a4 100644 --- a/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/errors.py +++ b/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/errors.py @@ -1,19 +1,19 @@ """ Dask task exceptions """ -from pydantic.errors import PydanticErrorMixin +from models_library.errors_classes import OsparcErrorMixin -class TaskValueError(PydanticErrorMixin, ValueError): +class TaskValueError(OsparcErrorMixin, ValueError): code = "task.value_error" -class TaskCancelledError(PydanticErrorMixin, RuntimeError): +class TaskCancelledError(OsparcErrorMixin, RuntimeError): code = "task.cancelled_error" msg_template = "The task was cancelled" -class ServiceRuntimeError(PydanticErrorMixin, RuntimeError): +class ServiceRuntimeError(OsparcErrorMixin, RuntimeError): code = "service.runtime_error" msg_template = ( "The service {service_key}:{service_version}" diff 
--git a/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/protocol.py b/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/protocol.py index 3ad8fafdd2f..fd6acf554e0 100644 --- a/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/protocol.py +++ b/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/protocol.py @@ -32,7 +32,7 @@ class TaskOwner(BaseModel): def has_parent(self) -> bool: return bool(self.parent_node_id and self.parent_project_id) - @model_validator(mode="after") + @model_validator(mode="before") @classmethod def check_parent_valid(cls, values: dict[str, Any]) -> dict[str, Any]: parent_project_id = values.get("parent_project_id") From 2e74551d20da236683ca2e5cb5aa29fcf08c985a Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 20 Sep 2024 10:47:59 +0200 Subject: [PATCH 037/280] upgrade dependencies --- packages/notifications-library/requirements/_base.txt | 10 ++++++++++ packages/notifications-library/requirements/_test.txt | 4 +++- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/packages/notifications-library/requirements/_base.txt b/packages/notifications-library/requirements/_base.txt index f781af68ebf..a4df8b512d1 100644 --- a/packages/notifications-library/requirements/_base.txt +++ b/packages/notifications-library/requirements/_base.txt @@ -74,12 +74,22 @@ pydantic==2.9.1 # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/postgres-database/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in + # pydantic-extra-types + # pydantic-settings pydantic-core==2.23.3 # via pydantic +pydantic-extra-types==2.9.0 + # via -r requirements/../../../packages/models-library/requirements/_base.in +pydantic-settings==2.5.2 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in pygments==2.18.0 # via rich python-dateutil==2.9.0.post0 # via arrow +python-dotenv==1.0.1 + # via pydantic-settings referencing==0.35.1 # via # jsonschema diff --git a/packages/notifications-library/requirements/_test.txt b/packages/notifications-library/requirements/_test.txt index 25211dd50f2..ab645dfb576 100644 --- a/packages/notifications-library/requirements/_test.txt +++ b/packages/notifications-library/requirements/_test.txt @@ -66,7 +66,9 @@ python-dateutil==2.9.0.post0 # -c requirements/_base.txt # faker python-dotenv==1.0.1 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in pyyaml==6.0.2 # via # -c requirements/../../../requirements/constraints.txt From c4e6fb2ae3062710af53dedd3c4cec2dbb69ed6e Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 20 Sep 2024 10:53:50 +0200 Subject: [PATCH 038/280] fix email model validator --- .../src/settings_library/email.py | 21 +++++++++---------- 1 file changed, 10 insertions(+), 11 deletions(-) diff --git a/packages/settings-library/src/settings_library/email.py b/packages/settings-library/src/settings_library/email.py index 9a8a9d026bf..fe5f8448b34 100644 --- a/packages/settings-library/src/settings_library/email.py +++ b/packages/settings-library/src/settings_library/email.py @@ -1,4 +1,5 @@ from enum import Enum +from typing import Self from pydantic import model_validator from pydantic.fields import Field @@ -32,24 +33,22 @@ class SMTPSettings(BaseCustomSettings): SMTP_PASSWORD: 
SecretStr | None = Field(None, min_length=1) @model_validator(mode="after") - @classmethod - def _both_credentials_must_be_set(cls, values): - username = values.get("SMTP_USERNAME") - password = values.get("SMTP_PASSWORD") + def _both_credentials_must_be_set(self) -> Self: + username = self.SMTP_USERNAME + password = self.SMTP_PASSWORD if username is None and password or username and password is None: msg = f"Please provide both {username=} and {password=} not just one" raise ValueError(msg) - return values + return self @model_validator(mode="after") - @classmethod - def _enabled_tls_required_authentication(cls, values): - smtp_protocol = values.get("SMTP_PROTOCOL") + def _enabled_tls_required_authentication(self) -> Self: + smtp_protocol = self.SMTP_PROTOCOL - username = values.get("SMTP_USERNAME") - password = values.get("SMTP_PASSWORD") + username = self.SMTP_USERNAME + password = self.SMTP_PASSWORD tls_enabled = smtp_protocol == EmailProtocol.TLS starttls_enabled = smtp_protocol == EmailProtocol.STARTTLS @@ -57,7 +56,7 @@ def _enabled_tls_required_authentication(cls, values): if (tls_enabled or starttls_enabled) and not (username or password): msg = "when using SMTP_PROTOCOL other than UNENCRYPTED username and password are required" raise ValueError(msg) - return values + return self @property def has_credentials(self) -> bool: From 0846266700a17faf1080a960c79cb8ad28d0df78 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 20 Sep 2024 11:24:10 +0200 Subject: [PATCH 039/280] ignore code type --- .../src/dask_task_models_library/container_tasks/errors.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/errors.py b/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/errors.py index 6667455d9a4..c94f2af10cb 100644 --- a/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/errors.py +++ b/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/errors.py @@ -5,16 +5,16 @@ class TaskValueError(OsparcErrorMixin, ValueError): - code = "task.value_error" + code = "task.value_error" # type: ignore[assignment] class TaskCancelledError(OsparcErrorMixin, RuntimeError): - code = "task.cancelled_error" + code = "task.cancelled_error" # type: ignore[assignment] msg_template = "The task was cancelled" class ServiceRuntimeError(OsparcErrorMixin, RuntimeError): - code = "service.runtime_error" + code = "service.runtime_error" # type: ignore[assignment] msg_template = ( "The service {service_key}:{service_version}" " running in container {container_id} failed with code" From 5601125b06391056260f8e999962b32833669883 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 20 Sep 2024 11:40:52 +0200 Subject: [PATCH 040/280] fix validate_call --- packages/notifications-library/tests/with_db/conftest.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/notifications-library/tests/with_db/conftest.py b/packages/notifications-library/tests/with_db/conftest.py index bdd3d0f3d09..f6faa5b4895 100644 --- a/packages/notifications-library/tests/with_db/conftest.py +++ b/packages/notifications-library/tests/with_db/conftest.py @@ -14,7 +14,7 @@ from models_library.products import ProductName from models_library.users import GroupID, UserID from notifications_library._templates import get_default_named_templates -from pydantic import validate_arguments +from pydantic import validate_call from 
simcore_postgres_database.models.jinja2_templates import jinja2_templates from simcore_postgres_database.models.payments_transactions import payments_transactions from simcore_postgres_database.models.products import products @@ -165,7 +165,7 @@ def set_template_to_product( sqlalchemy_async_engine: AsyncEngine, product: dict[str, Any] ): # NOTE: needs all fixture products in db - @validate_arguments + @validate_call async def _(template_name: IDStr, product_name: ProductName) -> None: async with sqlalchemy_async_engine.begin() as conn: await conn.execute( From d47edd08a3be2c818e8febcd2f772579c76c72ea Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 20 Sep 2024 11:52:40 +0200 Subject: [PATCH 041/280] fix validate call --- packages/notifications-library/tests/with_db/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/notifications-library/tests/with_db/conftest.py b/packages/notifications-library/tests/with_db/conftest.py index f6faa5b4895..750f3cc24a4 100644 --- a/packages/notifications-library/tests/with_db/conftest.py +++ b/packages/notifications-library/tests/with_db/conftest.py @@ -179,7 +179,7 @@ async def _(template_name: IDStr, product_name: ProductName) -> None: @pytest.fixture def unset_template_to_product(sqlalchemy_async_engine: AsyncEngine): - @validate_arguments + @validate_call async def _(template_name: IDStr, product_name: ProductName) -> None: async with sqlalchemy_async_engine.begin() as conn: await conn.execute( From 6f38de2559e34ac5d7d077563d40e5fbec5dc3a6 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 20 Sep 2024 12:02:51 +0200 Subject: [PATCH 042/280] fix faker types --- .../src/pytest_simcore/faker_payments_data.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/packages/pytest-simcore/src/pytest_simcore/faker_payments_data.py b/packages/pytest-simcore/src/pytest_simcore/faker_payments_data.py index 9d675c45e11..3f4058b72e9 100644 --- a/packages/pytest-simcore/src/pytest_simcore/faker_payments_data.py +++ b/packages/pytest-simcore/src/pytest_simcore/faker_payments_data.py @@ -24,7 +24,7 @@ from models_library.products import ProductName from models_library.users import UserID from models_library.wallets import WalletID -from pydantic import EmailStr, HttpUrl, parse_obj_as +from pydantic import EmailStr, HttpUrl, TypeAdapter from simcore_postgres_database.models.payments_transactions import ( PaymentTransactionState, ) @@ -34,27 +34,27 @@ @pytest.fixture def wallet_id(faker: Faker) -> WalletID: - return parse_obj_as(WalletID, faker.pyint()) + return TypeAdapter(WalletID).validate_python(faker.pyint()) @pytest.fixture def wallet_name(faker: Faker) -> IDStr: - return parse_obj_as(IDStr, f"wallet-{faker.word()}") + return TypeAdapter(IDStr).validate_python(f"wallet-{faker.word()}") @pytest.fixture -def invoice_url(faker: Faker) -> HttpUrl: - return parse_obj_as(HttpUrl, faker.image_url()) +def invoice_url(faker: Faker) -> str: + return faker.image_url() @pytest.fixture -def invoice_pdf_url(faker: Faker) -> HttpUrl: - return parse_obj_as(HttpUrl, faker.image_url()) +def invoice_pdf_url(faker: Faker) -> str: + return faker.image_url() @pytest.fixture def stripe_invoice_id(faker: Faker) -> StripeInvoiceID: - return parse_obj_as(StripeInvoiceID, f"in_{faker.word()}") + return TypeAdapter(StripeInvoiceID).validate_python(f"in_{faker.word()}") @pytest.fixture From e69b9ef357ebb0ebb1bd917fe71adc49936c100d Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 20 Sep 2024 13:00:35 +0200 
Subject: [PATCH 043/280] continue upgrading --- packages/aws-library/src/aws_library/s3/_client.py | 2 +- packages/aws-library/src/aws_library/s3/_errors.py | 10 +++++----- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/packages/aws-library/src/aws_library/s3/_client.py b/packages/aws-library/src/aws_library/s3/_client.py index ef83ee40d79..5503cecf3d4 100644 --- a/packages/aws-library/src/aws_library/s3/_client.py +++ b/packages/aws-library/src/aws_library/s3/_client.py @@ -78,7 +78,7 @@ async def create( ) assert isinstance(session_client, ClientCreatorContext) # nosec exit_stack = contextlib.AsyncExitStack() - s3_client = cast(S3Client, await exit_stack.enter_async_context(session_client)) + s3_client = cast(S3Client, await exit_stack.enter_async_context(session_client)) # type: ignore[arg-type] # NOTE: this triggers a botocore.exception.ClientError in case the connection is not made to the S3 backend await s3_client.list_buckets() diff --git a/packages/aws-library/src/aws_library/s3/_errors.py b/packages/aws-library/src/aws_library/s3/_errors.py index 9921e6c20dc..d3202822095 100644 --- a/packages/aws-library/src/aws_library/s3/_errors.py +++ b/packages/aws-library/src/aws_library/s3/_errors.py @@ -10,25 +10,25 @@ class S3NotConnectedError(S3RuntimeError): class S3AccessError(S3RuntimeError): - code = "s3_access.error" + code = "s3_access.error" # type: ignore[assignment] msg_template: str = "Unexpected error while accessing S3 backend" class S3BucketInvalidError(S3AccessError): - code = "s3_bucket.invalid_error" + code = "s3_bucket.invalid_error" # type: ignore[assignment] msg_template: str = "The bucket '{bucket}' is invalid" class S3KeyNotFoundError(S3AccessError): - code = "s3_key.not_found_error" + code = "s3_key.not_found_error" # type: ignore[assignment] msg_template: str = "The file {key} in {bucket} was not found" class S3UploadNotFoundError(S3AccessError): - code = "s3_upload.not_found_error" + code = "s3_upload.not_found_error" # type: ignore[assignment] msg_template: str = "The upload for {key} in {bucket} was not found" class S3DestinationNotEmptyError(S3AccessError): - code = "s3_destination.not_empty_error" + code = "s3_destination.not_empty_error" # type: ignore[assignment] msg_template: str = "The destination {dst_prefix} is not empty" From 9f17d5190d7cca85352e2c2d2a4a5d69c692eaa8 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 20 Sep 2024 14:08:00 +0200 Subject: [PATCH 044/280] continue upgrading --- packages/aws-library/src/aws_library/ec2/_client.py | 2 +- packages/aws-library/src/aws_library/s3/_client.py | 2 +- packages/aws-library/src/aws_library/ssm/_client.py | 6 ++---- .../pytest-simcore/src/pytest_simcore/aws_server.py | 11 +++-------- packages/settings-library/src/settings_library/ec2.py | 4 ++-- packages/settings-library/src/settings_library/s3.py | 6 ++---- 6 files changed, 11 insertions(+), 20 deletions(-) diff --git a/packages/aws-library/src/aws_library/ec2/_client.py b/packages/aws-library/src/aws_library/ec2/_client.py index 14094939dde..12c2a27fff0 100644 --- a/packages/aws-library/src/aws_library/ec2/_client.py +++ b/packages/aws-library/src/aws_library/ec2/_client.py @@ -41,7 +41,7 @@ async def create(cls, settings: EC2Settings) -> "SimcoreEC2API": session = aioboto3.Session() session_client = session.client( "ec2", - endpoint_url=settings.EC2_ENDPOINT, + endpoint_url=str(settings.EC2_ENDPOINT), aws_access_key_id=settings.EC2_ACCESS_KEY_ID, aws_secret_access_key=settings.EC2_SECRET_ACCESS_KEY, 
region_name=settings.EC2_REGION_NAME, diff --git a/packages/aws-library/src/aws_library/s3/_client.py b/packages/aws-library/src/aws_library/s3/_client.py index 5503cecf3d4..74364ce4e19 100644 --- a/packages/aws-library/src/aws_library/s3/_client.py +++ b/packages/aws-library/src/aws_library/s3/_client.py @@ -70,7 +70,7 @@ async def create( session = aioboto3.Session() session_client = session.client( "s3", - endpoint_url=settings.S3_ENDPOINT, + endpoint_url=str(settings.S3_ENDPOINT), aws_access_key_id=settings.S3_ACCESS_KEY, aws_secret_access_key=settings.S3_SECRET_KEY, region_name=settings.S3_REGION, diff --git a/packages/aws-library/src/aws_library/ssm/_client.py b/packages/aws-library/src/aws_library/ssm/_client.py index 2b51a93b82a..4cc09ac4eb8 100644 --- a/packages/aws-library/src/aws_library/ssm/_client.py +++ b/packages/aws-library/src/aws_library/ssm/_client.py @@ -49,16 +49,14 @@ async def create(cls, settings: SSMSettings) -> "SimcoreSSMAPI": session = aioboto3.Session() session_client = session.client( "ssm", - endpoint_url=settings.SSM_ENDPOINT, + endpoint_url=str(settings.SSM_ENDPOINT), aws_access_key_id=settings.SSM_ACCESS_KEY_ID.get_secret_value(), aws_secret_access_key=settings.SSM_SECRET_ACCESS_KEY.get_secret_value(), region_name=settings.SSM_REGION_NAME, ) assert isinstance(session_client, ClientCreatorContext) # nosec exit_stack = contextlib.AsyncExitStack() - ec2_client = cast( - SSMClient, await exit_stack.enter_async_context(session_client) - ) + ec2_client = cast(SSMClient, await exit_stack.enter_async_context(session_client)) # type: ignore[arg-type] return cls(ec2_client, session, exit_stack) async def close(self) -> None: diff --git a/packages/pytest-simcore/src/pytest_simcore/aws_server.py b/packages/pytest-simcore/src/pytest_simcore/aws_server.py index 12c3224e915..b47844f893d 100644 --- a/packages/pytest-simcore/src/pytest_simcore/aws_server.py +++ b/packages/pytest-simcore/src/pytest_simcore/aws_server.py @@ -75,7 +75,7 @@ def mocked_ec2_server_envs( mocked_ec2_server_settings: EC2Settings, monkeypatch: pytest.MonkeyPatch, ) -> EnvVarsDict: - changed_envs: EnvVarsDict = mocked_ec2_server_settings.dict() + changed_envs: EnvVarsDict = mocked_ec2_server_settings.model_dump() return setenvs_from_dict(monkeypatch, {**changed_envs}) @@ -101,10 +101,7 @@ def mocked_ssm_server_settings( ) -> SSMSettings: return SSMSettings( SSM_ACCESS_KEY_ID=SecretStr("xxx"), - SSM_ENDPOINT=parse_obj_as( - AnyHttpUrl, - f"http://{mocked_aws_server._ip_address}:{mocked_aws_server._port}", # pylint: disable=protected-access # noqa: SLF001 - ), + SSM_ENDPOINT=f"http://{mocked_aws_server._ip_address}:{mocked_aws_server._port}", # type: ignore[arg-type] # pylint: disable=protected-access # noqa: SLF001 SSM_SECRET_ACCESS_KEY=SecretStr("xxx"), ) @@ -124,9 +121,7 @@ def mocked_s3_server_settings( ) -> S3Settings: return S3Settings( S3_ACCESS_KEY=IDStr("xxx"), - S3_ENDPOINT=TypeAdapter(AnyHttpUrl).validate_python( - f"http://{mocked_aws_server._ip_address}:{mocked_aws_server._port}", # pylint: disable=protected-access # noqa: SLF001 - ), + S3_ENDPOINT=f"http://{mocked_aws_server._ip_address}:{mocked_aws_server._port}", # pylint: disable=protected-access # noqa: SLF001 S3_SECRET_KEY=IDStr("xxx"), S3_BUCKET_NAME=IDStr(f"pytest{faker.pystr().lower()}"), S3_REGION=IDStr("us-east-1"), diff --git a/packages/settings-library/src/settings_library/ec2.py b/packages/settings-library/src/settings_library/ec2.py index 08876fd6083..6a9ab6f65f5 100644 --- 
a/packages/settings-library/src/settings_library/ec2.py +++ b/packages/settings-library/src/settings_library/ec2.py @@ -1,4 +1,4 @@ -from pydantic import Field +from pydantic import AnyHttpUrl, Field from pydantic_settings import SettingsConfigDict from .base import BaseCustomSettings @@ -6,7 +6,7 @@ class EC2Settings(BaseCustomSettings): EC2_ACCESS_KEY_ID: str - EC2_ENDPOINT: str | None = Field( + EC2_ENDPOINT: AnyHttpUrl | None = Field( default=None, description="do not define if using standard AWS" ) EC2_REGION_NAME: str = "us-east-1" diff --git a/packages/settings-library/src/settings_library/s3.py b/packages/settings-library/src/settings_library/s3.py index 269ba56b731..18f23860658 100644 --- a/packages/settings-library/src/settings_library/s3.py +++ b/packages/settings-library/src/settings_library/s3.py @@ -1,6 +1,4 @@ -from typing import Annotated - -from pydantic import AfterValidator, AnyHttpUrl, Field +from pydantic import AnyHttpUrl, Field from pydantic_settings import SettingsConfigDict from .base import BaseCustomSettings @@ -10,7 +8,7 @@ class S3Settings(BaseCustomSettings): S3_ACCESS_KEY: IDStr S3_BUCKET_NAME: IDStr - S3_ENDPOINT: Annotated[AnyHttpUrl, AfterValidator(str)] | None = Field( + S3_ENDPOINT: AnyHttpUrl | None = Field( default=None, description="do not define if using standard AWS" ) S3_REGION: IDStr From d13f0e02374d44aab9fa54e0f206d6b435d35853 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 20 Sep 2024 14:08:09 +0200 Subject: [PATCH 045/280] continue upgrading --- packages/settings-library/src/settings_library/ssm.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/packages/settings-library/src/settings_library/ssm.py b/packages/settings-library/src/settings_library/ssm.py index 0fb0ec86da6..bb009792e67 100644 --- a/packages/settings-library/src/settings_library/ssm.py +++ b/packages/settings-library/src/settings_library/ssm.py @@ -1,6 +1,4 @@ -from typing import Annotated - -from pydantic import AfterValidator, AnyHttpUrl, Field, SecretStr +from pydantic import AnyHttpUrl, Field, SecretStr from pydantic_settings import SettingsConfigDict from .base import BaseCustomSettings @@ -8,7 +6,7 @@ class SSMSettings(BaseCustomSettings): SSM_ACCESS_KEY_ID: SecretStr - SSM_ENDPOINT: Annotated[AnyHttpUrl, AfterValidator(str)] | None = Field( + SSM_ENDPOINT: AnyHttpUrl | None = Field( default=None, description="do not define if using standard AWS" ) SSM_REGION_NAME: str = "us-east-1" From ef0e355fb94bb9eecedce962ab6d68ca385ee1f1 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 20 Sep 2024 14:13:07 +0200 Subject: [PATCH 046/280] fix endpoint_url type --- packages/pytest-simcore/src/pytest_simcore/aws_ec2_service.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/pytest-simcore/src/pytest_simcore/aws_ec2_service.py b/packages/pytest-simcore/src/pytest_simcore/aws_ec2_service.py index f971ef9b8f7..3f7bf0e96b3 100644 --- a/packages/pytest-simcore/src/pytest_simcore/aws_ec2_service.py +++ b/packages/pytest-simcore/src/pytest_simcore/aws_ec2_service.py @@ -26,7 +26,7 @@ async def ec2_client( exit_stack = contextlib.AsyncExitStack() session_client = session.client( "ec2", - endpoint_url=ec2_settings.EC2_ENDPOINT, + endpoint_url=str(ec2_settings.EC2_ENDPOINT), aws_access_key_id=ec2_settings.EC2_ACCESS_KEY_ID, aws_secret_access_key=ec2_settings.EC2_SECRET_ACCESS_KEY, region_name=ec2_settings.EC2_REGION_NAME, From 6c30f367b2864c69d3851d4ce3542f2ab0fcfcdf Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 20 
Sep 2024 14:14:31 +0200 Subject: [PATCH 047/280] fix mock --- packages/pytest-simcore/src/pytest_simcore/aws_server.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/pytest-simcore/src/pytest_simcore/aws_server.py b/packages/pytest-simcore/src/pytest_simcore/aws_server.py index b47844f893d..7c3c5d5fd6b 100644 --- a/packages/pytest-simcore/src/pytest_simcore/aws_server.py +++ b/packages/pytest-simcore/src/pytest_simcore/aws_server.py @@ -121,7 +121,7 @@ def mocked_s3_server_settings( ) -> S3Settings: return S3Settings( S3_ACCESS_KEY=IDStr("xxx"), - S3_ENDPOINT=f"http://{mocked_aws_server._ip_address}:{mocked_aws_server._port}", # pylint: disable=protected-access # noqa: SLF001 + S3_ENDPOINT=f"http://{mocked_aws_server._ip_address}:{mocked_aws_server._port}", # type: ignore[arg-type] # pylint: disable=protected-access # noqa: SLF001 S3_SECRET_KEY=IDStr("xxx"), S3_BUCKET_NAME=IDStr(f"pytest{faker.pystr().lower()}"), S3_REGION=IDStr("us-east-1"), @@ -133,5 +133,5 @@ def mocked_s3_server_envs( mocked_s3_server_settings: S3Settings, monkeypatch: pytest.MonkeyPatch, ) -> EnvVarsDict: - changed_envs: EnvVarsDict = mocked_s3_server_settings.dict(exclude_unset=True) + changed_envs: EnvVarsDict = mocked_s3_server_settings.model_dump(mode="json", exclude_unset=True) return setenvs_from_dict(monkeypatch, {**changed_envs}) From 3d734e079af385f43f4816f7cf89c6fd107a62fc Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 20 Sep 2024 14:17:11 +0200 Subject: [PATCH 048/280] fix endpoint_url type --- packages/pytest-simcore/src/pytest_simcore/aws_s3_service.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/pytest-simcore/src/pytest_simcore/aws_s3_service.py b/packages/pytest-simcore/src/pytest_simcore/aws_s3_service.py index 48fb2d1283e..cf4feca410b 100644 --- a/packages/pytest-simcore/src/pytest_simcore/aws_s3_service.py +++ b/packages/pytest-simcore/src/pytest_simcore/aws_s3_service.py @@ -25,14 +25,14 @@ async def s3_client(s3_settings: S3Settings) -> typing.AsyncIterator[S3Client]: exit_stack = contextlib.AsyncExitStack() session_client = session.client( "s3", - endpoint_url=s3_settings.S3_ENDPOINT, + endpoint_url=str(s3_settings.S3_ENDPOINT), aws_access_key_id=s3_settings.S3_ACCESS_KEY, aws_secret_access_key=s3_settings.S3_SECRET_KEY, region_name=s3_settings.S3_REGION, config=Config(signature_version="s3v4"), ) assert isinstance(session_client, ClientCreatorContext) - client = typing.cast(S3Client, await exit_stack.enter_async_context(session_client)) + client = typing.cast(S3Client, await exit_stack.enter_async_context(session_client)) # type: ignore[arg-type] yield client From c667921a21f718b5051f764eff9ab4d1325cec6b Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 20 Sep 2024 14:21:27 +0200 Subject: [PATCH 049/280] remove unused type ignore --- packages/aws-library/src/aws_library/s3/_client.py | 2 +- packages/aws-library/src/aws_library/ssm/_client.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/aws-library/src/aws_library/s3/_client.py b/packages/aws-library/src/aws_library/s3/_client.py index 74364ce4e19..5d1b357dead 100644 --- a/packages/aws-library/src/aws_library/s3/_client.py +++ b/packages/aws-library/src/aws_library/s3/_client.py @@ -78,7 +78,7 @@ async def create( ) assert isinstance(session_client, ClientCreatorContext) # nosec exit_stack = contextlib.AsyncExitStack() - s3_client = cast(S3Client, await exit_stack.enter_async_context(session_client)) # type: ignore[arg-type] + 
s3_client = cast(S3Client, await exit_stack.enter_async_context(session_client)) # NOTE: this triggers a botocore.exception.ClientError in case the connection is not made to the S3 backend await s3_client.list_buckets() diff --git a/packages/aws-library/src/aws_library/ssm/_client.py b/packages/aws-library/src/aws_library/ssm/_client.py index 4cc09ac4eb8..a46bf14bf93 100644 --- a/packages/aws-library/src/aws_library/ssm/_client.py +++ b/packages/aws-library/src/aws_library/ssm/_client.py @@ -56,7 +56,7 @@ async def create(cls, settings: SSMSettings) -> "SimcoreSSMAPI": ) assert isinstance(session_client, ClientCreatorContext) # nosec exit_stack = contextlib.AsyncExitStack() - ec2_client = cast(SSMClient, await exit_stack.enter_async_context(session_client)) # type: ignore[arg-type] + ec2_client = cast(SSMClient, await exit_stack.enter_async_context(session_client)) return cls(ec2_client, session, exit_stack) async def close(self) -> None: From 4fb2bf2d1858e220f0ea01c85b2afb35554336c7 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 20 Sep 2024 14:35:44 +0200 Subject: [PATCH 050/280] update error base class --- .../src/simcore_postgres_database/utils_folders.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/packages/postgres-database/src/simcore_postgres_database/utils_folders.py b/packages/postgres-database/src/simcore_postgres_database/utils_folders.py index cdcab9c9551..b8330cdee69 100644 --- a/packages/postgres-database/src/simcore_postgres_database/utils_folders.py +++ b/packages/postgres-database/src/simcore_postgres_database/utils_folders.py @@ -7,6 +7,8 @@ from functools import reduce from typing import Annotated, Any, ClassVar, Final, TypeAlias, cast +from models_library.errors_classes import OsparcErrorMixin + import sqlalchemy as sa from aiopg.sa.connection import SAConnection from aiopg.sa.result import RowProxy @@ -20,7 +22,6 @@ TypeAdapter, ValidationError, ) -from pydantic.errors import PydanticErrorMixin from simcore_postgres_database.utils_ordering import OrderByDict from sqlalchemy import Column, func from sqlalchemy.dialects import postgresql @@ -64,7 +65,7 @@ """ -class FoldersError(PydanticErrorMixin, RuntimeError): +class FoldersError(OsparcErrorMixin, RuntimeError): pass From c11f109d0ce7d4117c5e2c53ff6c72985abb0f2e Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 20 Sep 2024 15:02:52 +0200 Subject: [PATCH 051/280] fix folders error --- .../utils_folders.py | 44 +++++++++++++++++-- 1 file changed, 40 insertions(+), 4 deletions(-) diff --git a/packages/postgres-database/src/simcore_postgres_database/utils_folders.py b/packages/postgres-database/src/simcore_postgres_database/utils_folders.py index b8330cdee69..e102b925f69 100644 --- a/packages/postgres-database/src/simcore_postgres_database/utils_folders.py +++ b/packages/postgres-database/src/simcore_postgres_database/utils_folders.py @@ -7,7 +7,6 @@ from functools import reduce from typing import Annotated, Any, ClassVar, Final, TypeAlias, cast -from models_library.errors_classes import OsparcErrorMixin import sqlalchemy as sa from aiopg.sa.connection import SAConnection @@ -22,6 +21,8 @@ TypeAdapter, ValidationError, ) +from pydantic.errors import PydanticErrorMixin + from simcore_postgres_database.utils_ordering import OrderByDict from sqlalchemy import Column, func from sqlalchemy.dialects import postgresql @@ -64,9 +65,44 @@ * ProjectAlreadyExistsInFolderError """ - -class FoldersError(OsparcErrorMixin, RuntimeError): - pass +class _DefaultDict(dict): + def 
__missing__(self, key): + return f"'{key}=?'" + + +class FoldersError(PydanticErrorMixin, RuntimeError): + msg_template: str + + def __new__(cls, *_args, **_kwargs): + if not hasattr(cls, "code"): + cls.code = cls._get_full_class_name() # type: ignore[assignment] + return super().__new__(cls) + + def __init__(self, *_args, **kwargs) -> None: + self.__dict__ = kwargs + super().__init__(message=self._build_message(), code=self.code) + + def __str__(self) -> str: + return self._build_message() + + def _build_message(self) -> str: + # NOTE: safe. Does not raise KeyError + return self.msg_template.format_map(_DefaultDict(**self.__dict__)) + + @classmethod + def _get_full_class_name(cls) -> str: + relevant_classes = [ + c.__name__ + for c in cls.__mro__[:-1] + if c.__name__ + not in ( + "PydanticErrorMixin", + "FoldersError", + "Exception", + "BaseException", + ) + ] + return ".".join(reversed(relevant_classes)) class InvalidFolderNameError(FoldersError): From 68fa4aeaada86070481f8667524bf695b14df627 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 20 Sep 2024 16:01:04 +0200 Subject: [PATCH 052/280] use common osparcerrormixin --- .../postgres-database/requirements/ci.txt | 1 + .../postgres-database/requirements/dev.txt | 1 + .../utils_folders.py | 45 ++----------------- .../utils_projects_metadata.py | 4 +- .../utils_projects_nodes.py | 4 +- 5 files changed, 10 insertions(+), 45 deletions(-) diff --git a/packages/postgres-database/requirements/ci.txt b/packages/postgres-database/requirements/ci.txt index b901feff8e4..ffac2669760 100644 --- a/packages/postgres-database/requirements/ci.txt +++ b/packages/postgres-database/requirements/ci.txt @@ -13,6 +13,7 @@ # installs this repo's packages pytest-simcore @ ../../packages/pytest-simcore/ +simcore-models-library @ ../models-library # current module simcore-postgres-database @ . diff --git a/packages/postgres-database/requirements/dev.txt b/packages/postgres-database/requirements/dev.txt index 8136f1a48b5..a05947becdf 100644 --- a/packages/postgres-database/requirements/dev.txt +++ b/packages/postgres-database/requirements/dev.txt @@ -14,6 +14,7 @@ # installs this repo's packages --editable ../../packages/pytest-simcore/ +--editable ../models-library/ # current module --editable . 
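Before the utils_folders diff below, a note on the most common mechanical rewrite in this series: Pydantic v2 removes `parse_obj_as`/`parse_raw_as`, and every call site in the test and fixture hunks above moves to `TypeAdapter`. A standalone sketch of the two idioms (the "10Mib" value is taken from the tests above; nothing here is part of any patch):

    from pydantic import ByteSize, TypeAdapter

    # Pydantic v1 (removed in v2):
    #   from pydantic import parse_obj_as
    #   size = parse_obj_as(ByteSize, "10Mib")

    # Pydantic v2 replacement used throughout these patches:
    size = TypeAdapter(ByteSize).validate_python("10Mib")
    assert isinstance(size, int)  # ByteSize subclasses int

    # the JSON flavour replaces parse_raw_as / Model.parse_raw
    chunks = TypeAdapter(list[int]).validate_json("[1, 2, 3]")
    assert chunks == [1, 2, 3]
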
diff --git a/packages/postgres-database/src/simcore_postgres_database/utils_folders.py b/packages/postgres-database/src/simcore_postgres_database/utils_folders.py index e102b925f69..16aed6ea035 100644 --- a/packages/postgres-database/src/simcore_postgres_database/utils_folders.py +++ b/packages/postgres-database/src/simcore_postgres_database/utils_folders.py @@ -7,10 +7,10 @@ from functools import reduce from typing import Annotated, Any, ClassVar, Final, TypeAlias, cast - import sqlalchemy as sa from aiopg.sa.connection import SAConnection from aiopg.sa.result import RowProxy +from models_library.errors_classes import OsparcErrorMixin from pydantic import ( BaseModel, ConfigDict, @@ -21,8 +21,6 @@ TypeAdapter, ValidationError, ) -from pydantic.errors import PydanticErrorMixin - from simcore_postgres_database.utils_ordering import OrderByDict from sqlalchemy import Column, func from sqlalchemy.dialects import postgresql @@ -65,44 +63,9 @@ * ProjectAlreadyExistsInFolderError """ -class _DefaultDict(dict): - def __missing__(self, key): - return f"'{key}=?'" - - -class FoldersError(PydanticErrorMixin, RuntimeError): - msg_template: str - - def __new__(cls, *_args, **_kwargs): - if not hasattr(cls, "code"): - cls.code = cls._get_full_class_name() # type: ignore[assignment] - return super().__new__(cls) - - def __init__(self, *_args, **kwargs) -> None: - self.__dict__ = kwargs - super().__init__(message=self._build_message(), code=self.code) - - def __str__(self) -> str: - return self._build_message() - - def _build_message(self) -> str: - # NOTE: safe. Does not raise KeyError - return self.msg_template.format_map(_DefaultDict(**self.__dict__)) - - @classmethod - def _get_full_class_name(cls) -> str: - relevant_classes = [ - c.__name__ - for c in cls.__mro__[:-1] - if c.__name__ - not in ( - "PydanticErrorMixin", - "FoldersError", - "Exception", - "BaseException", - ) - ] - return ".".join(reversed(relevant_classes)) + +class FoldersError(OsparcErrorMixin, RuntimeError): + ... 
class InvalidFolderNameError(FoldersError): diff --git a/packages/postgres-database/src/simcore_postgres_database/utils_projects_metadata.py b/packages/postgres-database/src/simcore_postgres_database/utils_projects_metadata.py index efda444a68f..02e08098d1a 100644 --- a/packages/postgres-database/src/simcore_postgres_database/utils_projects_metadata.py +++ b/packages/postgres-database/src/simcore_postgres_database/utils_projects_metadata.py @@ -5,8 +5,8 @@ import sqlalchemy as sa from aiopg.sa.connection import SAConnection from aiopg.sa.result import ResultProxy, RowProxy +from models_library.errors_classes import OsparcErrorMixin from pydantic import BaseModel, ConfigDict -from pydantic.errors import PydanticErrorMixin from sqlalchemy.dialects.postgresql import insert as pg_insert from .errors import ForeignKeyViolation @@ -18,7 +18,7 @@ # -class BaseProjectsMetadataError(PydanticErrorMixin, RuntimeError): +class BaseProjectsMetadataError(OsparcErrorMixin, RuntimeError): msg_template: str = "Project metadata unexpected error" diff --git a/packages/postgres-database/src/simcore_postgres_database/utils_projects_nodes.py b/packages/postgres-database/src/simcore_postgres_database/utils_projects_nodes.py index 2ee32815626..ba1028b3bc4 100644 --- a/packages/postgres-database/src/simcore_postgres_database/utils_projects_nodes.py +++ b/packages/postgres-database/src/simcore_postgres_database/utils_projects_nodes.py @@ -5,8 +5,8 @@ import sqlalchemy from aiopg.sa.connection import SAConnection +from models_library.errors_classes import OsparcErrorMixin from pydantic import BaseModel, ConfigDict, Field -from pydantic.errors import PydanticErrorMixin from sqlalchemy.dialects.postgresql import insert as pg_insert from .errors import ForeignKeyViolation, UniqueViolation @@ -17,7 +17,7 @@ # # Errors # -class BaseProjectNodesError(PydanticErrorMixin, RuntimeError): +class BaseProjectNodesError(OsparcErrorMixin, RuntimeError): msg_template: str = "Project nodes unexpected error" From 40a4627d927464b58f425e1600d1824aa3bc7a10 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 20 Sep 2024 16:10:58 +0200 Subject: [PATCH 053/280] continue upgrading --- .../tests/email/test_email_events.py | 2 +- .../src/pytest_simcore/faker_products_data.py | 8 +++----- .../src/pytest_simcore/faker_users_data.py | 10 ++++------ .../src/pytest_simcore/helpers/faker_factories.py | 2 +- 4 files changed, 9 insertions(+), 13 deletions(-) diff --git a/packages/notifications-library/tests/email/test_email_events.py b/packages/notifications-library/tests/email/test_email_events.py index 9a1b383522f..995da5faf4e 100644 --- a/packages/notifications-library/tests/email/test_email_events.py +++ b/packages/notifications-library/tests/email/test_email_events.py @@ -67,7 +67,7 @@ def ipinfo(faker: Faker) -> dict[str, Any]: def request_form(faker: Faker) -> dict[str, Any]: return AccountRequestInfo( **AccountRequestInfo.model_config["json_schema_extra"]["example"] - ).dict() + ).model_dump() @pytest.fixture diff --git a/packages/pytest-simcore/src/pytest_simcore/faker_products_data.py b/packages/pytest-simcore/src/pytest_simcore/faker_products_data.py index f82636b6633..e55c1e489f0 100644 --- a/packages/pytest-simcore/src/pytest_simcore/faker_products_data.py +++ b/packages/pytest-simcore/src/pytest_simcore/faker_products_data.py @@ -14,7 +14,7 @@ import pytest from faker import Faker from models_library.products import ProductName, StripePriceID, StripeTaxRateID -from pydantic import EmailStr, parse_obj_as +from pydantic import 
EmailStr, TypeAdapter from .helpers.faker_factories import random_product @@ -51,8 +51,7 @@ def product_name() -> ProductName: def support_email( request: pytest.FixtureRequest, product_name: ProductName ) -> EmailStr: - return parse_obj_as( - EmailStr, + return TypeAdapter(EmailStr).validate_python( request.config.getoption("--faker-support-email", default=None) or f"support@{product_name}.info", ) @@ -60,8 +59,7 @@ def support_email( @pytest.fixture def bcc_email(request: pytest.FixtureRequest, product_name: ProductName) -> EmailStr: - return parse_obj_as( - EmailStr, + return TypeAdapter(EmailStr).validate_python( request.config.getoption("--faker-bcc-email", default=None) or f"finance@{product_name}-department.info", ) diff --git a/packages/pytest-simcore/src/pytest_simcore/faker_users_data.py b/packages/pytest-simcore/src/pytest_simcore/faker_users_data.py index 6ba011db47c..4e59b6db93a 100644 --- a/packages/pytest-simcore/src/pytest_simcore/faker_users_data.py +++ b/packages/pytest-simcore/src/pytest_simcore/faker_users_data.py @@ -14,7 +14,7 @@ from faker import Faker from models_library.basic_types import IDStr from models_library.users import UserID -from pydantic import EmailStr, parse_obj_as +from pydantic import EmailStr, TypeAdapter from .helpers.faker_factories import DEFAULT_TEST_PASSWORD, random_user @@ -61,8 +61,7 @@ def pytest_addoption(parser: pytest.Parser): @pytest.fixture def user_id(faker: Faker, request: pytest.FixtureRequest) -> UserID: - return parse_obj_as( - UserID, + return TypeAdapter(UserID).validate_python( request.config.getoption("--faker-user-id", default=None) or faker.pyint(), ) @@ -74,8 +73,7 @@ def is_external_user_email(request: pytest.FixtureRequest) -> bool: @pytest.fixture def user_email(faker: Faker, request: pytest.FixtureRequest) -> EmailStr: - return parse_obj_as( - EmailStr, + return TypeAdapter(EmailStr).validate_python( request.config.getoption(_FAKE_USER_EMAIL_OPTION, default=None) or faker.email(), ) @@ -93,7 +91,7 @@ def user_last_name(faker: Faker) -> str: @pytest.fixture def user_name(user_email: str) -> IDStr: - return parse_obj_as(IDStr, user_email.split("@")[0]) + return TypeAdapter(IDStr).validate_python(user_email.split("@")[0]) @pytest.fixture diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/faker_factories.py b/packages/pytest-simcore/src/pytest_simcore/helpers/faker_factories.py index bc415ab3161..f51a5d8211b 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/faker_factories.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/faker_factories.py @@ -209,7 +209,7 @@ def random_product( registration_email_template: str | None = None, fake: Faker = DEFAULT_FAKER, **overrides, -): +) -> dict[str, Any]: """ Foreign keys are: From 0563d81bd646c46919c552d5fce517f11c4355e2 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 20 Sep 2024 20:45:22 +0200 Subject: [PATCH 054/280] upgrade dependencies --- packages/service-library/requirements/_aiohttp.txt | 2 -- packages/service-library/requirements/_base.txt | 14 ++++++++++++-- packages/service-library/requirements/_fastapi.txt | 12 ++++++++---- packages/service-library/requirements/_test.txt | 8 +++----- 4 files changed, 23 insertions(+), 13 deletions(-) diff --git a/packages/service-library/requirements/_aiohttp.txt b/packages/service-library/requirements/_aiohttp.txt index b0132a53002..f97b5b35ee8 100644 --- a/packages/service-library/requirements/_aiohttp.txt +++ b/packages/service-library/requirements/_aiohttp.txt @@ -33,8 +33,6 @@ 
frozenlist==1.4.1 # via # aiohttp # aiosignal -greenlet==3.0.3 - # via sqlalchemy idna==3.7 # via # requests diff --git a/packages/service-library/requirements/_base.txt b/packages/service-library/requirements/_base.txt index 0f4ac721816..6d0447e7c0e 100644 --- a/packages/service-library/requirements/_base.txt +++ b/packages/service-library/requirements/_base.txt @@ -72,7 +72,7 @@ orjson==3.10.7 # -r requirements/../../../packages/models-library/requirements/_base.in pamqp==3.3.0 # via aiormq -pydantic==2.9.1 +pydantic==2.9.2 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt @@ -81,14 +81,24 @@ pydantic==2.9.1 # -r requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/_base.in # fast-depends -pydantic-core==2.23.3 + # pydantic-extra-types + # pydantic-settings +pydantic-core==2.23.4 # via pydantic +pydantic-extra-types==2.9.0 + # via -r requirements/../../../packages/models-library/requirements/_base.in +pydantic-settings==2.5.2 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in pygments==2.18.0 # via rich pyinstrument==4.7.2 # via -r requirements/_base.in python-dateutil==2.9.0.post0 # via arrow +python-dotenv==1.0.1 + # via pydantic-settings pyyaml==6.0.2 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt diff --git a/packages/service-library/requirements/_fastapi.txt b/packages/service-library/requirements/_fastapi.txt index 2f066595b42..abaaac8e111 100644 --- a/packages/service-library/requirements/_fastapi.txt +++ b/packages/service-library/requirements/_fastapi.txt @@ -1,3 +1,5 @@ +annotated-types==0.7.0 + # via pydantic anyio==4.4.0 # via # httpx @@ -11,7 +13,7 @@ certifi==2024.7.4 # httpx click==8.1.7 # via uvicorn -fastapi==0.99.1 +fastapi==0.115.0 # via # -r requirements/_fastapi.in # prometheus-fastapi-instrumentator @@ -37,18 +39,19 @@ prometheus-client==0.20.0 # prometheus-fastapi-instrumentator prometheus-fastapi-instrumentator==6.1.0 # via -r requirements/_fastapi.in -pydantic==1.10.18 +pydantic==2.9.2 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/settings-library/requirements/_base.in # -c requirements/../../../requirements/constraints.txt # fastapi +pydantic-core==2.23.4 + # via pydantic sniffio==1.3.1 # via # anyio # httpx -starlette==0.27.0 +starlette==0.38.5 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt @@ -58,5 +61,6 @@ typing-extensions==4.12.2 # via # fastapi # pydantic + # pydantic-core uvicorn==0.30.6 # via -r requirements/_fastapi.in diff --git a/packages/service-library/requirements/_test.txt b/packages/service-library/requirements/_test.txt index a64640db6ad..38282969f74 100644 --- a/packages/service-library/requirements/_test.txt +++ b/packages/service-library/requirements/_test.txt @@ -59,10 +59,6 @@ frozenlist==1.4.1 # -c requirements/_base.txt # aiohttp # aiosignal -greenlet==3.0.3 - # via - # -c requirements/_aiohttp.txt - # sqlalchemy 
h11==0.14.0 # via # -c requirements/_fastapi.txt @@ -186,7 +182,9 @@ python-dateutil==2.9.0.post0 # -c requirements/_base.txt # faker python-dotenv==1.0.1 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in pyyaml==6.0.2 # via # -c requirements/../../../requirements/constraints.txt From 2094d4fd9c24952267ee21ff832f43c504d80094 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 20 Sep 2024 20:54:00 +0200 Subject: [PATCH 055/280] continue upgrading --- packages/service-library/src/servicelib/rabbitmq/_models.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/packages/service-library/src/servicelib/rabbitmq/_models.py b/packages/service-library/src/servicelib/rabbitmq/_models.py index 565447072fa..52119847033 100644 --- a/packages/service-library/src/servicelib/rabbitmq/_models.py +++ b/packages/service-library/src/servicelib/rabbitmq/_models.py @@ -2,12 +2,13 @@ from collections.abc import Awaitable, Callable from typing import Any, Protocol +from models_library.basic_types import ConstrainedStr from models_library.rabbitmq_basic_types import ( REGEX_RABBIT_QUEUE_ALLOWED_SYMBOLS, RPCMethodName, RPCNamespace, ) -from pydantic import ConstrainedStr, parse_obj_as +from pydantic import TypeAdapter MessageHandler = Callable[[Any], Awaitable[bool]] @@ -30,4 +31,4 @@ def from_namespace_and_method( cls, namespace: RPCNamespace, method_name: RPCMethodName ) -> "RPCNamespacedMethodName": namespaced_method_name = f"{namespace}.{method_name}" - return parse_obj_as(cls, namespaced_method_name) + return TypeAdapter(cls).validate_python(namespaced_method_name) From 57b07105783106c12e3cf8dba8916884f9eef8e4 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 20 Sep 2024 20:59:23 +0200 Subject: [PATCH 056/280] continue upgrading --- .../src/servicelib/deferred_tasks/_redis_task_tracker.py | 6 +++++- .../service-library/tests/deferred_tasks/example_app.py | 4 ++-- .../service-library/tests/fastapi/test_exceptions_utils.py | 6 +++--- 3 files changed, 10 insertions(+), 6 deletions(-) diff --git a/packages/service-library/src/servicelib/deferred_tasks/_redis_task_tracker.py b/packages/service-library/src/servicelib/deferred_tasks/_redis_task_tracker.py index 69762108e71..718af8c526f 100644 --- a/packages/service-library/src/servicelib/deferred_tasks/_redis_task_tracker.py +++ b/packages/service-library/src/servicelib/deferred_tasks/_redis_task_tracker.py @@ -33,7 +33,11 @@ async def get_new_unique_identifier(self) -> TaskUID: async def _get_raw(self, redis_key: str) -> TaskScheduleModel | None: found_data = await self.redis_client_sdk.redis.get(redis_key) - return None if found_data is None else TaskScheduleModel.parse_raw(found_data) + return ( + None + if found_data is None + else TaskScheduleModel.model_validate_json(found_data) + ) async def get(self, task_uid: TaskUID) -> TaskScheduleModel | None: return await self._get_raw(_get_key(task_uid)) diff --git a/packages/service-library/tests/deferred_tasks/example_app.py b/packages/service-library/tests/deferred_tasks/example_app.py index 75850fddc2e..1962d0b4232 100644 --- a/packages/service-library/tests/deferred_tasks/example_app.py +++ b/packages/service-library/tests/deferred_tasks/example_app.py @@ -107,8 +107,8 @@ async def _commands_handler( ) -> Any: """Handles all commands send by remote party""" if command == "init-context": - context.redis_settings = RedisSettings.parse_raw(payload["redis"]) - context.rabbit_settings = RabbitSettings.parse_raw(payload["rabbit"]) + 
context.redis_settings = RedisSettings.model_validate_json(payload["redis"]) + context.rabbit_settings = RabbitSettings.model_validate_json(payload["rabbit"]) # using the same db as the deferred tasks with different keys context.in_memory_lists = InMemoryLists(context.redis_settings, port) diff --git a/packages/service-library/tests/fastapi/test_exceptions_utils.py b/packages/service-library/tests/fastapi/test_exceptions_utils.py index 845043f3405..cfe7fbde0e8 100644 --- a/packages/service-library/tests/fastapi/test_exceptions_utils.py +++ b/packages/service-library/tests/fastapi/test_exceptions_utils.py @@ -10,7 +10,7 @@ from fastapi import FastAPI, HTTPException from httpx import AsyncClient from models_library.api_schemas__common.errors import DefaultApiError -from pydantic import parse_raw_as +from pydantic import TypeAdapter from servicelib.fastapi.exceptions_utils import ( handle_errors_as_500, http_exception_as_json_response, @@ -66,7 +66,7 @@ async def test_http_errors_respond_with_error_model( response = await client.post(f"/error/{code}") assert response.status_code == code - error = parse_raw_as(DefaultApiError, response.text) + error = TypeAdapter(DefaultApiError).validate_json(response.text) assert error.detail == f"test {code}" assert error.name @@ -79,4 +79,4 @@ async def test_non_http_error_handling( response = await client.post(f"/raise/{code}") print(response) - error = parse_raw_as(DefaultApiError, response.text) + error = TypeAdapter(DefaultApiError).validate_json(response.text) From 29306b1613ddbcc3e5d46b5897995362d23d3f22 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Sun, 22 Sep 2024 22:27:18 +0200 Subject: [PATCH 057/280] continue upgrading --- .../src/pytest_simcore/aws_server.py | 10 ++++++---- .../src/pytest_simcore/faker_projects_data.py | 7 +++---- .../helpers/httpx_calls_capture_openapi.py | 4 ++-- .../src/pytest_simcore/httpbin_service.py | 4 ++-- .../src/pytest_simcore/httpx_calls_capture.py | 8 ++++---- .../services_api_mocks_for_aiohttp_clients.py | 19 ++++++++++-------- .../pytest_simcore/simcore_storage_service.py | 6 ++++-- .../servicelib/aiohttp/application_setup.py | 6 ++++-- .../src/servicelib/aiohttp/docker_utils.py | 16 +++++++++------ .../src/servicelib/docker_utils.py | 10 +++++++--- .../src/servicelib/fastapi/docker_utils.py | 20 ++++++++++++------- .../src/servicelib/file_utils.py | 4 ++-- .../src/servicelib/progress_bar.py | 4 ++-- .../src/servicelib/utils_meta.py | 4 ++-- 14 files changed, 72 insertions(+), 50 deletions(-) diff --git a/packages/pytest-simcore/src/pytest_simcore/aws_server.py b/packages/pytest-simcore/src/pytest_simcore/aws_server.py index 7c3c5d5fd6b..74f007973c5 100644 --- a/packages/pytest-simcore/src/pytest_simcore/aws_server.py +++ b/packages/pytest-simcore/src/pytest_simcore/aws_server.py @@ -11,7 +11,7 @@ from faker import Faker from models_library.utils.fastapi_encoders import jsonable_encoder from moto.server import ThreadedMotoServer -from pydantic import AnyHttpUrl, SecretStr, TypeAdapter, parse_obj_as +from pydantic import SecretStr from pytest_mock.plugin import MockerFixture from settings_library.basic_types import IDStr from settings_library.ec2 import EC2Settings @@ -101,7 +101,7 @@ def mocked_ssm_server_settings( ) -> SSMSettings: return SSMSettings( SSM_ACCESS_KEY_ID=SecretStr("xxx"), - SSM_ENDPOINT=f"http://{mocked_aws_server._ip_address}:{mocked_aws_server._port}", # type: ignore[arg-type] # pylint: disable=protected-access # noqa: SLF001 + 
SSM_ENDPOINT=f"http://{mocked_aws_server._ip_address}:{mocked_aws_server._port}", # type: ignore[arg-type] # pylint: disable=protected-access # noqa: SLF001 SSM_SECRET_ACCESS_KEY=SecretStr("xxx"), ) @@ -121,7 +121,7 @@ def mocked_s3_server_settings( ) -> S3Settings: return S3Settings( S3_ACCESS_KEY=IDStr("xxx"), - S3_ENDPOINT=f"http://{mocked_aws_server._ip_address}:{mocked_aws_server._port}", # type: ignore[arg-type] # pylint: disable=protected-access # noqa: SLF001 + S3_ENDPOINT=f"http://{mocked_aws_server._ip_address}:{mocked_aws_server._port}", # type: ignore[arg-type] # pylint: disable=protected-access # noqa: SLF001 S3_SECRET_KEY=IDStr("xxx"), S3_BUCKET_NAME=IDStr(f"pytest{faker.pystr().lower()}"), S3_REGION=IDStr("us-east-1"), @@ -133,5 +133,7 @@ def mocked_s3_server_envs( mocked_s3_server_settings: S3Settings, monkeypatch: pytest.MonkeyPatch, ) -> EnvVarsDict: - changed_envs: EnvVarsDict = mocked_s3_server_settings.model_dump(mode="json", exclude_unset=True) + changed_envs: EnvVarsDict = mocked_s3_server_settings.model_dump( + mode="json", exclude_unset=True + ) return setenvs_from_dict(monkeypatch, {**changed_envs}) diff --git a/packages/pytest-simcore/src/pytest_simcore/faker_projects_data.py b/packages/pytest-simcore/src/pytest_simcore/faker_projects_data.py index 643ffee8859..f2d0eee8105 100644 --- a/packages/pytest-simcore/src/pytest_simcore/faker_projects_data.py +++ b/packages/pytest-simcore/src/pytest_simcore/faker_projects_data.py @@ -13,7 +13,7 @@ from faker import Faker from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID -from pydantic import parse_obj_as +from pydantic import TypeAdapter _MESSAGE = ( "If set, it overrides the fake value of `{}` fixture." @@ -34,12 +34,11 @@ def pytest_addoption(parser: pytest.Parser): @pytest.fixture def project_id(faker: Faker, request: pytest.FixtureRequest) -> ProjectID: - return parse_obj_as( - ProjectID, + return TypeAdapter(ProjectID).validate_python( request.config.getoption("--faker-project-id", default=None) or faker.uuid4(), ) @pytest.fixture def node_id(faker: Faker) -> NodeID: - return parse_obj_as(NodeID, faker.uuid4()) + return TypeAdapter(NodeID).validate_python(faker.uuid4()) diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/httpx_calls_capture_openapi.py b/packages/pytest-simcore/src/pytest_simcore/helpers/httpx_calls_capture_openapi.py index fd5afaa183a..177b1330e36 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/httpx_calls_capture_openapi.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/httpx_calls_capture_openapi.py @@ -5,7 +5,7 @@ import httpx import jsonref -from pydantic import ValidationError, parse_obj_as +from pydantic import TypeAdapter, ValidationError from settings_library.catalog import CatalogSettings from settings_library.director_v2 import DirectorV2Settings from settings_library.storage import StorageSettings @@ -87,7 +87,7 @@ def _get_params( raise VerbNotInPathError(msg) if (params := verb_spec.get("parameters")) is None: continue - all_params += parse_obj_as(list[CapturedParameter], params) + all_params += TypeAdapter(list[CapturedParameter]).validate_python(params) return set(all_params) diff --git a/packages/pytest-simcore/src/pytest_simcore/httpbin_service.py b/packages/pytest-simcore/src/pytest_simcore/httpbin_service.py index b6c0a5aad3b..6bc71929eb3 100644 --- a/packages/pytest-simcore/src/pytest_simcore/httpbin_service.py +++ b/packages/pytest-simcore/src/pytest_simcore/httpbin_service.py @@ -14,7 +14,7 @@ 
import requests import requests.exceptions from docker.errors import APIError -from pydantic import HttpUrl, parse_obj_as +from pydantic import HttpUrl, TypeAdapter from tenacity import retry from tenacity.after import after_log from tenacity.retry import retry_if_exception_type @@ -56,7 +56,7 @@ def _wait_until_httpbin_is_responsive(): _wait_until_httpbin_is_responsive() - yield parse_obj_as(HttpUrl, base_url) + yield TypeAdapter(HttpUrl).validate_python(base_url) finally: with suppress(APIError): diff --git a/packages/pytest-simcore/src/pytest_simcore/httpx_calls_capture.py b/packages/pytest-simcore/src/pytest_simcore/httpx_calls_capture.py index 5c8df1ff6c5..d8cd056c115 100644 --- a/packages/pytest-simcore/src/pytest_simcore/httpx_calls_capture.py +++ b/packages/pytest-simcore/src/pytest_simcore/httpx_calls_capture.py @@ -38,7 +38,7 @@ import pytest import respx import yaml -from pydantic import parse_obj_as +from pydantic import TypeAdapter from pytest_mock import MockerFixture, MockType from pytest_simcore.helpers.docker import get_service_published_port from pytest_simcore.helpers.host import get_localhost_ip @@ -213,9 +213,9 @@ def _( assert capture_path.suffix == ".json" if services_mocks_enabled: - captures: list[HttpApiCallCaptureModel] = parse_obj_as( - list[HttpApiCallCaptureModel], json.loads(capture_path.read_text()) - ) + captures: list[HttpApiCallCaptureModel] = TypeAdapter( + list[HttpApiCallCaptureModel] + ).validate_python(json.loads(capture_path.read_text())) if len(side_effects_callbacks) > 0: assert len(side_effects_callbacks) == len(captures) diff --git a/packages/pytest-simcore/src/pytest_simcore/services_api_mocks_for_aiohttp_clients.py b/packages/pytest-simcore/src/pytest_simcore/services_api_mocks_for_aiohttp_clients.py index 704f1a33990..8c0f0128281 100644 --- a/packages/pytest-simcore/src/pytest_simcore/services_api_mocks_for_aiohttp_clients.py +++ b/packages/pytest-simcore/src/pytest_simcore/services_api_mocks_for_aiohttp_clients.py @@ -28,7 +28,7 @@ from models_library.projects_pipeline import ComputationTask from models_library.projects_state import RunningState from models_library.utils.fastapi_encoders import jsonable_encoder -from pydantic import AnyUrl, ByteSize, parse_obj_as +from pydantic import AnyUrl, ByteSize, TypeAdapter from servicelib.aiohttp import status from yarl import URL @@ -372,11 +372,13 @@ def get_upload_link_cb(url: URL, **kwargs) -> CallbackResult: if file_size := kwargs["params"].get("file_size") is not None: assert file_size upload_schema = FileUploadSchema( - chunk_size=parse_obj_as(ByteSize, "5GiB"), - urls=[parse_obj_as(AnyUrl, f"{scheme[link_type]}://{file_id}")], + chunk_size=TypeAdapter(ByteSize).validate_python("5GiB"), + urls=[ + TypeAdapter(AnyUrl).validate_python(f"{scheme[link_type]}://{file_id}") + ], links=FileUploadLinks( - abort_upload=parse_obj_as(AnyUrl, f"{url}:abort"), - complete_upload=parse_obj_as(AnyUrl, f"{url}:complete"), + abort_upload=TypeAdapter(AnyUrl).validate_python(f"{url}:abort"), + complete_upload=TypeAdapter(AnyUrl).validate_python(f"{url}:complete"), ), ) return CallbackResult( @@ -385,7 +387,7 @@ def get_upload_link_cb(url: URL, **kwargs) -> CallbackResult: ) # version 1 returns a presigned link presigned_link = PresignedLink( - link=parse_obj_as(AnyUrl, f"{scheme[link_type]}://{file_id}") + link=TypeAdapter(AnyUrl).validate_python(f"{scheme[link_type]}://{file_id}") ) return CallbackResult( status=status.HTTP_200_OK, @@ -473,8 +475,9 @@ def generate_future_link(url, **kwargs): (parsed_url.scheme, 
parsed_url.netloc, parsed_url.path, "", "", "") ) - payload: FileUploadCompleteResponse = parse_obj_as( - FileUploadCompleteResponse, + payload: FileUploadCompleteResponse = TypeAdapter( + FileUploadCompleteResponse + ).validate_python( { "links": { "state": stripped_url + ":complete/futures/" + str(faker.uuid4()) diff --git a/packages/pytest-simcore/src/pytest_simcore/simcore_storage_service.py b/packages/pytest-simcore/src/pytest_simcore/simcore_storage_service.py index 9628d1058c9..e2f7654d3d0 100644 --- a/packages/pytest-simcore/src/pytest_simcore/simcore_storage_service.py +++ b/packages/pytest-simcore/src/pytest_simcore/simcore_storage_service.py @@ -10,7 +10,7 @@ import tenacity from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID, SimcoreS3FileID -from pydantic import AnyUrl, parse_obj_as +from pydantic import AnyUrl, TypeAdapter from pytest_mock import MockerFixture from servicelib.minio_utils import ServiceRetryPolicyUponInitialization from yarl import URL @@ -82,6 +82,8 @@ def create_simcore_file_id() -> Callable[[ProjectID, NodeID, str], SimcoreS3File def _creator( project_id: ProjectID, node_id: NodeID, file_name: str ) -> SimcoreS3FileID: - return parse_obj_as(SimcoreS3FileID, f"{project_id}/{node_id}/{file_name}") + return TypeAdapter(SimcoreS3FileID).validate_python( + f"{project_id}/{node_id}/{file_name}" + ) return _creator diff --git a/packages/service-library/src/servicelib/aiohttp/application_setup.py b/packages/service-library/src/servicelib/aiohttp/application_setup.py index 4fae3acc09f..4da40aa0182 100644 --- a/packages/service-library/src/servicelib/aiohttp/application_setup.py +++ b/packages/service-library/src/servicelib/aiohttp/application_setup.py @@ -8,7 +8,7 @@ import arrow from aiohttp import web -from pydantic import parse_obj_as +from pydantic import TypeAdapter from .application_keys import APP_CONFIG_KEY, APP_SETTINGS_KEY @@ -94,7 +94,9 @@ def _is_addon_enabled_from_config( for part in parts: if section and part == "enabled": # if section exists, no need to explicitly enable it - return parse_obj_as(bool, searched_config.get(part, True)) + return TypeAdapter(bool).validate_python( + searched_config.get(part, True) + ) searched_config = searched_config[part] except KeyError as ee: diff --git a/packages/service-library/src/servicelib/aiohttp/docker_utils.py b/packages/service-library/src/servicelib/aiohttp/docker_utils.py index 636b3492616..8e9393e1e69 100644 --- a/packages/service-library/src/servicelib/aiohttp/docker_utils.py +++ b/packages/service-library/src/servicelib/aiohttp/docker_utils.py @@ -2,7 +2,7 @@ import aiohttp from models_library.docker import DockerGenericTag -from pydantic import ValidationError, parse_obj_as +from pydantic import TypeAdapter, ValidationError from settings_library.docker_registry import RegistrySettings from yarl import URL @@ -68,9 +68,9 @@ async def retrieve_image_layer_information( # if the image has multiple architectures json_response = await response.json() try: - multi_arch_manifests = parse_obj_as( - DockerImageMultiArchManifestsV2, json_response - ) + multi_arch_manifests = TypeAdapter( + DockerImageMultiArchManifestsV2 + ).validate_python(json_response) # find the correct platform digest = "" for manifest in multi_arch_manifests.manifests: @@ -89,8 +89,12 @@ async def retrieve_image_layer_information( response.raise_for_status() assert response.status == status.HTTP_200_OK # nosec json_response = await response.json() - return parse_obj_as(DockerImageManifestsV2, 
json_response) + return TypeAdapter(DockerImageManifestsV2).validate_python( + json_response + ) except ValidationError: - return parse_obj_as(DockerImageManifestsV2, json_response) + return TypeAdapter(DockerImageManifestsV2).validate_python( + json_response + ) return None diff --git a/packages/service-library/src/servicelib/docker_utils.py b/packages/service-library/src/servicelib/docker_utils.py index dfab07283d5..df976c623af 100644 --- a/packages/service-library/src/servicelib/docker_utils.py +++ b/packages/service-library/src/servicelib/docker_utils.py @@ -11,7 +11,7 @@ from models_library.docker import DockerGenericTag from models_library.generated_models.docker_rest_api import ProgressDetail from models_library.utils.change_case import snake_to_camel -from pydantic import BaseModel, ByteSize, ConfigDict, ValidationError, parse_obj_as +from pydantic import BaseModel, ByteSize, ConfigDict, TypeAdapter, ValidationError from settings_library.docker_registry import RegistrySettings from yarl import URL @@ -60,7 +60,9 @@ class DockerImageManifestsV2(BaseModel): @cached_property def layers_total_size(self) -> ByteSize: - return parse_obj_as(ByteSize, sum(layer.size for layer in self.layers)) + return TypeAdapter(ByteSize).validate_python( + sum(layer.size for layer in self.layers) + ) class DockerImageMultiArchManifestsV2(BaseModel): @@ -241,7 +243,9 @@ async def pull_image( image, stream=True, auth=registry_auth ): try: - parsed_progress = parse_obj_as(_DockerPullImage, pull_progress) + parsed_progress = TypeAdapter(_DockerPullImage).validate_python( + pull_progress + ) except ValidationError: _logger.exception( "Unexpected error while validating '%s'. " diff --git a/packages/service-library/src/servicelib/fastapi/docker_utils.py b/packages/service-library/src/servicelib/fastapi/docker_utils.py index 1c71c190a47..c7db4c1cf2d 100644 --- a/packages/service-library/src/servicelib/fastapi/docker_utils.py +++ b/packages/service-library/src/servicelib/fastapi/docker_utils.py @@ -5,7 +5,7 @@ import httpx from models_library.basic_types import IDStr from models_library.docker import DockerGenericTag -from pydantic import ByteSize, ValidationError, parse_obj_as +from pydantic import ByteSize, TypeAdapter, ValidationError from settings_library.docker_registry import RegistrySettings from yarl import URL @@ -72,9 +72,9 @@ async def retrieve_image_layer_information( # if the image has multiple architectures json_response = response.json() try: - multi_arch_manifests = parse_obj_as( - DockerImageMultiArchManifestsV2, json_response - ) + multi_arch_manifests = TypeAdapter( + DockerImageMultiArchManifestsV2 + ).validate_python(json_response) # find the correct platform digest = "" for manifest in multi_arch_manifests.manifests: @@ -93,14 +93,20 @@ async def retrieve_image_layer_information( response.raise_for_status() assert response.status_code == status.HTTP_200_OK # nosec json_response = response.json() - return parse_obj_as(DockerImageManifestsV2, json_response) + return TypeAdapter(DockerImageManifestsV2).validate_python( + json_response + ) except ValidationError: - return parse_obj_as(DockerImageManifestsV2, json_response) + return TypeAdapter(DockerImageManifestsV2).validate_python( + json_response + ) return None -_DEFAULT_MIN_IMAGE_SIZE: Final[ByteSize] = parse_obj_as(ByteSize, "200MiB") +_DEFAULT_MIN_IMAGE_SIZE: Final[ByteSize] = TypeAdapter(ByteSize).validate_python( + "200MiB" +) async def pull_images( diff --git a/packages/service-library/src/servicelib/file_utils.py 
b/packages/service-library/src/servicelib/file_utils.py index c90468cba2a..a52854c26e7 100644 --- a/packages/service-library/src/servicelib/file_utils.py +++ b/packages/service-library/src/servicelib/file_utils.py @@ -10,9 +10,9 @@ # https://docs.python.org/3/library/os.html#os.remove from aiofiles.os import remove from aiofiles.os import wrap as sync_to_async -from pydantic import ByteSize, parse_obj_as +from pydantic import ByteSize, TypeAdapter -CHUNK_4KB: Final[ByteSize] = parse_obj_as(ByteSize, "4kb") # 4K blocks +CHUNK_4KB: Final[ByteSize] = TypeAdapter(ByteSize).validate_python("4kb") # 4K blocks class AsyncStream(Protocol): diff --git a/packages/service-library/src/servicelib/progress_bar.py b/packages/service-library/src/servicelib/progress_bar.py index 782f89ba550..bf70c0c3e88 100644 --- a/packages/service-library/src/servicelib/progress_bar.py +++ b/packages/service-library/src/servicelib/progress_bar.py @@ -10,7 +10,7 @@ ProgressStructuredMessage, ProgressUnit, ) -from pydantic import parse_obj_as +from pydantic import TypeAdapter from .logging_utils import log_catch @@ -95,7 +95,7 @@ async def main_fct(): def __post_init__(self) -> None: if self.progress_unit is not None: - parse_obj_as(ProgressUnit, self.progress_unit) # type: ignore[arg-type] # mypy does not like Literal with parse_obj_as + TypeAdapter(ProgressUnit).validate_python(self.progress_unit) self._continuous_value_lock = asyncio.Lock() self.num_steps = max(1, self.num_steps) if self.step_weights: diff --git a/packages/service-library/src/servicelib/utils_meta.py b/packages/service-library/src/servicelib/utils_meta.py index 46fa78dd83e..6ee48fd4d56 100644 --- a/packages/service-library/src/servicelib/utils_meta.py +++ b/packages/service-library/src/servicelib/utils_meta.py @@ -6,7 +6,7 @@ from models_library.basic_types import VersionStr from packaging.version import Version -from pydantic import parse_obj_as +from pydantic import TypeAdapter class PackageInfo: @@ -40,7 +40,7 @@ def version(self) -> Version: @property def __version__(self) -> VersionStr: - return parse_obj_as(VersionStr, self._distribution.version) + return TypeAdapter(VersionStr).validate_python(self._distribution.version) @property def api_prefix_path_tag(self) -> str: From a9c3ef37c34929132ce24f4bb0baa00901733421 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 23 Sep 2024 09:26:22 +0200 Subject: [PATCH 058/280] remove deprecated parse_obj_as --- .../fastapi/long_running_tasks/_client.py | 5 ++-- .../rpc_interfaces/catalog/services.py | 26 ++++++++++--------- .../dynamic_scheduler/services.py | 8 +++--- .../efs_guardian/efs_manager.py | 4 +-- .../resource_usage_tracker/pricing_plans.py | 16 ++++++------ .../resource_usage_tracker/pricing_units.py | 8 +++--- .../resource_usage_tracker/service_runs.py | 10 ++++--- .../service-library/src/servicelib/redis.py | 4 +-- .../aiohttp/long_running_tasks/conftest.py | 4 +-- .../test_long_running_tasks.py | 6 ++--- ...st_long_running_tasks_with_task_context.py | 6 ++--- .../tests/aiohttp/test_docker_utils.py | 5 ++-- .../test__redis_task_tracker.py | 5 ++-- .../test_long_running_tasks.py | 12 +++++---- ...test_long_running_tasks_context_manager.py | 10 +++---- .../tests/fastapi/test_docker_utils.py | 7 +++-- .../tests/fastapi/test_http_client_thin.py | 4 +-- .../tests/test_archiving_utils.py | 5 ++-- 18 files changed, 74 insertions(+), 71 deletions(-) diff --git a/packages/service-library/src/servicelib/fastapi/long_running_tasks/_client.py 
b/packages/service-library/src/servicelib/fastapi/long_running_tasks/_client.py index a2dda66735a..24fd85077d4 100644 --- a/packages/service-library/src/servicelib/fastapi/long_running_tasks/_client.py +++ b/packages/service-library/src/servicelib/fastapi/long_running_tasks/_client.py @@ -6,7 +6,7 @@ from fastapi import FastAPI, status from httpx import AsyncClient, HTTPError -from pydantic import AnyHttpUrl, PositiveFloat, parse_obj_as +from pydantic import AnyHttpUrl, PositiveFloat, TypeAdapter from tenacity import RetryCallState from tenacity.asyncio import AsyncRetrying from tenacity.retry import retry_if_exception_type @@ -129,8 +129,7 @@ def _client_configuration(self) -> ClientConfiguration: return output def _get_url(self, path: str) -> AnyHttpUrl: - output: AnyHttpUrl = parse_obj_as( - AnyHttpUrl, + output: AnyHttpUrl = TypeAdapter(AnyHttpUrl).validate_python( f"{self._base_url}{self._client_configuration.router_prefix}{path}", ) return output diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/catalog/services.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/catalog/services.py index cc67413aefe..83efa8f1d0e 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/catalog/services.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/catalog/services.py @@ -16,7 +16,7 @@ ) from models_library.services_types import ServiceKey, ServiceVersion from models_library.users import UserID -from pydantic import NonNegativeInt, parse_obj_as, validate_arguments +from pydantic import NonNegativeInt, TypeAdapter, validate_call from servicelib.logging_utils import log_decorator from servicelib.rabbitmq._constants import RPC_REQUEST_DEFAULT_TIMEOUT_S @@ -40,7 +40,7 @@ async def list_services_paginated( # pylint: disable=too-many-arguments CatalogForbiddenError: no access-rights to list services """ - @validate_arguments() + @validate_call() async def _call( product_name: ProductName, user_id: UserID, @@ -49,7 +49,7 @@ async def _call( ): return await rpc_client.request( CATALOG_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "list_services_paginated"), + TypeAdapter(RPCMethodName).validate_python("list_services_paginated"), product_name=product_name, user_id=user_id, limit=limit, @@ -60,7 +60,9 @@ async def _call( result = await _call( product_name=product_name, user_id=user_id, limit=limit, offset=offset ) - assert parse_obj_as(PageRpc[ServiceGetV2], result) is not None # nosec + assert ( + TypeAdapter(PageRpc[ServiceGetV2]).validate_python(result) is not None + ) # nosec return cast(PageRpc[ServiceGetV2], result) @@ -80,7 +82,7 @@ async def get_service( CatalogForbiddenError: not access rights to read this service """ - @validate_arguments() + @validate_call() async def _call( product_name: ProductName, user_id: UserID, @@ -89,7 +91,7 @@ async def _call( ) -> Any: return await rpc_client.request( CATALOG_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "get_service"), + TypeAdapter(RPCMethodName).validate_python("get_service"), product_name=product_name, user_id=user_id, service_key=service_key, @@ -103,7 +105,7 @@ async def _call( service_key=service_key, service_version=service_version, ) - assert parse_obj_as(ServiceGetV2, result) is not None # nosec + assert TypeAdapter(ServiceGetV2).validate_python(result) is not None # nosec return cast(ServiceGetV2, result) @@ -125,7 +127,7 @@ async def update_service( CatalogForbiddenError: not access rights to read this service """ - @validate_arguments() + @validate_call() 
async def _call( product_name: ProductName, user_id: UserID, @@ -135,7 +137,7 @@ async def _call( ): return await rpc_client.request( CATALOG_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "update_service"), + TypeAdapter(RPCMethodName).validate_python("update_service"), product_name=product_name, user_id=user_id, service_key=service_key, @@ -150,7 +152,7 @@ async def _call( service_version=service_version, update=update, ) - assert parse_obj_as(ServiceGetV2, result) is not None # nosec + assert TypeAdapter(ServiceGetV2).validate_python(result) is not None # nosec return cast(ServiceGetV2, result) @@ -170,7 +172,7 @@ async def check_for_service( CatalogForbiddenError: not access rights to read this service """ - @validate_arguments() + @validate_call() async def _call( product_name: ProductName, user_id: UserID, @@ -179,7 +181,7 @@ async def _call( ): return await rpc_client.request( CATALOG_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "check_for_service"), + TypeAdapter(RPCMethodName).validate_python("check_for_service"), product_name=product_name, user_id=user_id, service_key=service_key, diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_scheduler/services.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_scheduler/services.py index 9da2dad425e..9bf8e262611 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_scheduler/services.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_scheduler/services.py @@ -10,7 +10,7 @@ from models_library.api_schemas_webserver.projects_nodes import NodeGet, NodeGetIdle from models_library.projects_nodes_io import NodeID from models_library.rabbitmq_basic_types import RPCMethodName -from pydantic import NonNegativeInt, parse_obj_as +from pydantic import NonNegativeInt, TypeAdapter from servicelib.logging_utils import log_decorator from servicelib.rabbitmq import RabbitMQRPCClient @@ -33,7 +33,7 @@ async def get_service_status( ) -> NodeGetIdle | DynamicServiceGet | NodeGet: result = await rabbitmq_rpc_client.request( DYNAMIC_SCHEDULER_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "get_service_status"), + TypeAdapter(RPCMethodName).validate_python("get_service_status"), node_id=node_id, timeout_s=_RPC_DEFAULT_TIMEOUT_S, ) @@ -49,7 +49,7 @@ async def run_dynamic_service( ) -> DynamicServiceGet | NodeGet: result = await rabbitmq_rpc_client.request( DYNAMIC_SCHEDULER_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "run_dynamic_service"), + TypeAdapter(RPCMethodName).validate_python("run_dynamic_service"), dynamic_service_start=dynamic_service_start, timeout_s=_RPC_DEFAULT_TIMEOUT_S, ) @@ -66,7 +66,7 @@ async def stop_dynamic_service( ) -> None: result = await rabbitmq_rpc_client.request( DYNAMIC_SCHEDULER_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "stop_dynamic_service"), + TypeAdapter(RPCMethodName).validate_python("stop_dynamic_service"), dynamic_service_stop=dynamic_service_stop, timeout_s=timeout_s, ) diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/efs_guardian/efs_manager.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/efs_guardian/efs_manager.py index 592959eb08c..ec05906b1ef 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/efs_guardian/efs_manager.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/efs_guardian/efs_manager.py @@ -6,7 +6,7 @@ from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID 
from models_library.rabbitmq_basic_types import RPCMethodName -from pydantic import NonNegativeInt, parse_obj_as +from pydantic import NonNegativeInt, TypeAdapter from ....logging_utils import log_decorator from ....rabbitmq import RabbitMQRPCClient @@ -27,7 +27,7 @@ async def create_project_specific_data_dir( ) -> Path: output: Path = await rabbitmq_rpc_client.request( EFS_GUARDIAN_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "create_project_specific_data_dir"), + TypeAdapter(RPCMethodName).validate_python("create_project_specific_data_dir"), project_id=project_id, node_id=node_id, storage_directory_name=storage_directory_name, diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/pricing_plans.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/pricing_plans.py index a7dc4b5d404..a2c9259aa0b 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/pricing_plans.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/pricing_plans.py @@ -16,7 +16,7 @@ PricingPlanUpdate, ) from models_library.services import ServiceKey, ServiceVersion -from pydantic import NonNegativeInt, parse_obj_as +from pydantic import NonNegativeInt, TypeAdapter from ....logging_utils import log_decorator from ....rabbitmq import RabbitMQRPCClient @@ -36,7 +36,7 @@ async def get_pricing_plan( ) -> PricingPlanGet: result: PricingPlanGet = await rabbitmq_rpc_client.request( RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "get_pricing_plan"), + TypeAdapter(RPCMethodName).validate_python("get_pricing_plan"), product_name=product_name, pricing_plan_id=pricing_plan_id, timeout_s=_DEFAULT_TIMEOUT_S, @@ -53,7 +53,7 @@ async def list_pricing_plans( ) -> list[PricingPlanGet]: result: PricingPlanGet = await rabbitmq_rpc_client.request( RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "list_pricing_plans"), + TypeAdapter(RPCMethodName).validate_python("list_pricing_plans"), product_name=product_name, timeout_s=_DEFAULT_TIMEOUT_S, ) @@ -69,7 +69,7 @@ async def create_pricing_plan( ) -> PricingPlanGet: result: PricingPlanGet = await rabbitmq_rpc_client.request( RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "create_pricing_plan"), + TypeAdapter(RPCMethodName).validate_python("create_pricing_plan"), data=data, timeout_s=_DEFAULT_TIMEOUT_S, ) @@ -86,7 +86,7 @@ async def update_pricing_plan( ) -> PricingPlanGet: result: PricingPlanGet = await rabbitmq_rpc_client.request( RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "update_pricing_plan"), + TypeAdapter(RPCMethodName).validate_python("update_pricing_plan"), product_name=product_name, data=data, timeout_s=_DEFAULT_TIMEOUT_S, @@ -104,8 +104,8 @@ async def list_connected_services_to_pricing_plan_by_pricing_plan( ) -> list[PricingPlanToServiceGet]: result: PricingPlanGet = await rabbitmq_rpc_client.request( RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, - parse_obj_as( - RPCMethodName, "list_connected_services_to_pricing_plan_by_pricing_plan" + TypeAdapter(RPCMethodName).validate_python( + "list_connected_services_to_pricing_plan_by_pricing_plan" ), product_name=product_name, pricing_plan_id=pricing_plan_id, @@ -126,7 +126,7 @@ async def connect_service_to_pricing_plan( ) -> PricingPlanToServiceGet: result: PricingPlanGet = await rabbitmq_rpc_client.request( RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, 
"connect_service_to_pricing_plan"), + TypeAdapter(RPCMethodName).validate_python("connect_service_to_pricing_plan"), product_name=product_name, pricing_plan_id=pricing_plan_id, service_key=service_key, diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/pricing_units.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/pricing_units.py index cec80e7186a..9851c55bc11 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/pricing_units.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/pricing_units.py @@ -15,7 +15,7 @@ PricingUnitWithCostCreate, PricingUnitWithCostUpdate, ) -from pydantic import NonNegativeInt, parse_obj_as +from pydantic import NonNegativeInt, TypeAdapter from ....logging_utils import log_decorator from ....rabbitmq import RabbitMQRPCClient @@ -36,7 +36,7 @@ async def get_pricing_unit( ) -> PricingUnitGet: result: PricingUnitGet = await rabbitmq_rpc_client.request( RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "get_pricing_unit"), + TypeAdapter(RPCMethodName).validate_python("get_pricing_unit"), product_name=product_name, pricing_plan_id=pricing_plan_id, pricing_unit_id=pricing_unit_id, @@ -55,7 +55,7 @@ async def create_pricing_unit( ) -> PricingUnitGet: result: PricingUnitGet = await rabbitmq_rpc_client.request( RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "create_pricing_unit"), + TypeAdapter(RPCMethodName).validate_python("create_pricing_unit"), product_name=product_name, data=data, timeout_s=_DEFAULT_TIMEOUT_S, @@ -73,7 +73,7 @@ async def update_pricing_unit( ) -> PricingUnitGet: result: PricingUnitGet = await rabbitmq_rpc_client.request( RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "update_pricing_unit"), + TypeAdapter(RPCMethodName).validate_python("update_pricing_unit"), product_name=product_name, data=data, timeout_s=_DEFAULT_TIMEOUT_S, diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/service_runs.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/service_runs.py index e826363897a..efc04b2dba6 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/service_runs.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/service_runs.py @@ -18,7 +18,7 @@ from models_library.rest_ordering import OrderBy from models_library.users import UserID from models_library.wallets import WalletID -from pydantic import AnyUrl, NonNegativeInt, parse_obj_as +from pydantic import AnyUrl, NonNegativeInt, TypeAdapter from ....logging_utils import log_decorator from ....rabbitmq import RabbitMQRPCClient @@ -44,7 +44,7 @@ async def get_service_run_page( ) -> ServiceRunPage: result = await rabbitmq_rpc_client.request( RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "get_service_run_page"), + TypeAdapter(RPCMethodName).validate_python("get_service_run_page"), user_id=user_id, product_name=product_name, limit=limit, @@ -74,7 +74,9 @@ async def get_osparc_credits_aggregated_usages_page( ) -> OsparcCreditsAggregatedUsagesPage: result = await rabbitmq_rpc_client.request( RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "get_osparc_credits_aggregated_usages_page"), + TypeAdapter(RPCMethodName).validate_python( + 
"get_osparc_credits_aggregated_usages_page" + ), user_id=user_id, product_name=product_name, limit=limit, @@ -102,7 +104,7 @@ async def export_service_runs( ) -> AnyUrl: result: AnyUrl = await rabbitmq_rpc_client.request( RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "export_service_runs"), + TypeAdapter(RPCMethodName).validate_python("export_service_runs"), user_id=user_id, product_name=product_name, wallet_id=wallet_id, diff --git a/packages/service-library/src/servicelib/redis.py b/packages/service-library/src/servicelib/redis.py index 03847ae0b04..7bbee359e9d 100644 --- a/packages/service-library/src/servicelib/redis.py +++ b/packages/service-library/src/servicelib/redis.py @@ -10,8 +10,8 @@ import redis.asyncio as aioredis import redis.exceptions +from models_library.errors_classes import OsparcErrorMixin from pydantic import NonNegativeFloat, NonNegativeInt -from pydantic.errors import PydanticErrorMixin from redis.asyncio.lock import Lock from redis.asyncio.retry import Retry from redis.backoff import ExponentialBackoff @@ -36,7 +36,7 @@ _logger = logging.getLogger(__name__) -class BaseRedisError(PydanticErrorMixin, RuntimeError): +class BaseRedisError(OsparcErrorMixin, RuntimeError): ... diff --git a/packages/service-library/tests/aiohttp/long_running_tasks/conftest.py b/packages/service-library/tests/aiohttp/long_running_tasks/conftest.py index 68d06d15278..f4fcc9b318c 100644 --- a/packages/service-library/tests/aiohttp/long_running_tasks/conftest.py +++ b/packages/service-library/tests/aiohttp/long_running_tasks/conftest.py @@ -9,7 +9,7 @@ from aiohttp import web from aiohttp.test_utils import TestClient from faker import Faker -from pydantic import BaseModel, parse_obj_as +from pydantic import BaseModel, TypeAdapter from pytest_simcore.helpers.assert_checks import assert_status from servicelib.aiohttp import long_running_tasks, status from servicelib.aiohttp.long_running_tasks.server import TaskId @@ -92,7 +92,7 @@ async def _caller(client: TestClient, **query_kwargs) -> TaskId: data, error = await assert_status(resp, status.HTTP_202_ACCEPTED) assert data assert not error - task_get = parse_obj_as(long_running_tasks.server.TaskGet, data) + task_get = TypeAdapter(long_running_tasks.server.TaskGet).validate_python(data) return task_get.task_id return _caller diff --git a/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks.py b/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks.py index afd9e8f4fde..55ddea8046b 100644 --- a/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks.py +++ b/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks.py @@ -18,7 +18,7 @@ import pytest from aiohttp import web from aiohttp.test_utils import TestClient -from pydantic import parse_obj_as +from pydantic import TypeAdapter from pytest_simcore.helpers.assert_checks import assert_status from servicelib.aiohttp import long_running_tasks, status from servicelib.aiohttp.long_running_tasks.server import TaskGet, TaskId @@ -216,7 +216,7 @@ async def test_list_tasks( result = await client.get(f"{list_url}") data, error = await assert_status(result, status.HTTP_200_OK) assert not error - list_of_tasks = parse_obj_as(list[TaskGet], data) + list_of_tasks = TypeAdapter(list[TaskGet]).validate_python(data) assert len(list_of_tasks) == NUM_TASKS # the task name is properly formatted @@ -235,5 +235,5 @@ async def test_list_tasks( result = await client.get(f"{list_url}") data, 
error = await assert_status(result, status.HTTP_200_OK) assert not error - list_of_tasks = parse_obj_as(list[TaskGet], data) + list_of_tasks = TypeAdapter(list[TaskGet]).validate_python(data) assert len(list_of_tasks) == NUM_TASKS - (task_index + 1) diff --git a/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks_with_task_context.py b/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks_with_task_context.py index 941ae31359d..5671eda108f 100644 --- a/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks_with_task_context.py +++ b/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks_with_task_context.py @@ -18,7 +18,7 @@ import pytest from aiohttp import web from aiohttp.test_utils import TestClient -from pydantic import create_model, parse_obj_as +from pydantic import TypeAdapter, create_model from pytest_simcore.helpers.assert_checks import assert_status from servicelib.aiohttp import long_running_tasks, status from servicelib.aiohttp.long_running_tasks._server import ( @@ -108,7 +108,7 @@ async def test_list_tasks( result = await client_with_task_context.get(f"{list_url}") data, error = await assert_status(result, status.HTTP_200_OK) assert not error - list_of_tasks = parse_obj_as(list[TaskGet], data) + list_of_tasks = TypeAdapter(list[TaskGet]).validate_python(data) assert len(list_of_tasks) == 0 # the list should be full if we pass the expected context @@ -117,7 +117,7 @@ async def test_list_tasks( ) data, error = await assert_status(result, status.HTTP_200_OK) assert not error - list_of_tasks = parse_obj_as(list[TaskGet], data) + list_of_tasks = TypeAdapter(list[TaskGet]).validate_python(data) assert len(list_of_tasks) == 1 diff --git a/packages/service-library/tests/aiohttp/test_docker_utils.py b/packages/service-library/tests/aiohttp/test_docker_utils.py index 890ffdc588b..4b5c9747c7f 100644 --- a/packages/service-library/tests/aiohttp/test_docker_utils.py +++ b/packages/service-library/tests/aiohttp/test_docker_utils.py @@ -11,7 +11,7 @@ from faker import Faker from models_library.docker import DockerGenericTag from models_library.progress_bar import ProgressReport -from pydantic import parse_obj_as +from pydantic import TypeAdapter from pytest_mock import MockerFixture from servicelib import progress_bar from servicelib.aiohttp.docker_utils import retrieve_image_layer_information @@ -42,8 +42,7 @@ async def test_retrieve_image_layer_information( if "sha256" in service_tag: image_name = f"{service_repo}@{service_tag}" await remove_images_from_host([image_name]) - docker_image = parse_obj_as( - DockerGenericTag, + docker_image = TypeAdapter(DockerGenericTag).validate_python( f"{registry_settings.REGISTRY_URL}/{osparc_service['image']['name']}:{osparc_service['image']['tag']}", ) layer_information = await retrieve_image_layer_information( diff --git a/packages/service-library/tests/deferred_tasks/test__redis_task_tracker.py b/packages/service-library/tests/deferred_tasks/test__redis_task_tracker.py index 3ec3bde01ed..366759e22d3 100644 --- a/packages/service-library/tests/deferred_tasks/test__redis_task_tracker.py +++ b/packages/service-library/tests/deferred_tasks/test__redis_task_tracker.py @@ -5,7 +5,7 @@ from datetime import timedelta import pytest -from pydantic import parse_obj_as +from pydantic import TypeAdapter from servicelib.deferred_tasks._models import TaskUID from servicelib.deferred_tasks._redis_task_tracker import RedisTaskTracker from 
servicelib.deferred_tasks._task_schedule import TaskScheduleModel, TaskState @@ -19,8 +19,7 @@ @pytest.fixture def task_schedule() -> TaskScheduleModel: - return parse_obj_as( - TaskScheduleModel, + return TypeAdapter(TaskScheduleModel).validate_python( { "timeout": timedelta(seconds=1), "execution_attempts": 1, diff --git a/packages/service-library/tests/fastapi/long_running_tasks/test_long_running_tasks.py b/packages/service-library/tests/fastapi/long_running_tasks/test_long_running_tasks.py index bd55b44d498..bec8e3a6af2 100644 --- a/packages/service-library/tests/fastapi/long_running_tasks/test_long_running_tasks.py +++ b/packages/service-library/tests/fastapi/long_running_tasks/test_long_running_tasks.py @@ -18,7 +18,7 @@ from asgi_lifespan import LifespanManager from fastapi import APIRouter, Depends, FastAPI, status from httpx import AsyncClient -from pydantic import parse_obj_as +from pydantic import TypeAdapter from servicelib.fastapi import long_running_tasks from servicelib.long_running_tasks._models import TaskGet, TaskId from servicelib.long_running_tasks._task import TaskContext @@ -94,7 +94,9 @@ async def _caller(app: FastAPI, client: AsyncClient, **query_kwargs) -> TaskId: ) resp = await client.post(f"{url}") assert resp.status_code == status.HTTP_202_ACCEPTED - task_id = parse_obj_as(long_running_tasks.server.TaskId, resp.json()) + task_id = TypeAdapter(long_running_tasks.server.TaskId).validate_python( + resp.json() + ) return task_id return _caller @@ -274,7 +276,7 @@ async def test_list_tasks_empty_list(app: FastAPI, client: AsyncClient): list_url = app.url_path_for("list_tasks") result = await client.get(f"{list_url}") assert result.status_code == status.HTTP_200_OK - list_of_tasks = parse_obj_as(list[TaskGet], result.json()) + list_of_tasks = TypeAdapter(list[TaskGet]).validate_python(result.json()) assert list_of_tasks == [] @@ -296,7 +298,7 @@ async def test_list_tasks( list_url = app.url_path_for("list_tasks") result = await client.get(f"{list_url}") assert result.status_code == status.HTTP_200_OK - list_of_tasks = parse_obj_as(list[TaskGet], result.json()) + list_of_tasks = TypeAdapter(list[TaskGet]).validate_python(result.json()) assert len(list_of_tasks) == NUM_TASKS # now wait for them to finish @@ -311,5 +313,5 @@ async def test_list_tasks( # the list shall go down one by one result = await client.get(f"{list_url}") assert result.status_code == status.HTTP_200_OK - list_of_tasks = parse_obj_as(list[TaskGet], result.json()) + list_of_tasks = TypeAdapter(list[TaskGet]).validate_python(result.json()) assert len(list_of_tasks) == NUM_TASKS - (task_index + 1) diff --git a/packages/service-library/tests/fastapi/long_running_tasks/test_long_running_tasks_context_manager.py b/packages/service-library/tests/fastapi/long_running_tasks/test_long_running_tasks_context_manager.py index 9f15184b052..eefd44f3b64 100644 --- a/packages/service-library/tests/fastapi/long_running_tasks/test_long_running_tasks_context_manager.py +++ b/packages/service-library/tests/fastapi/long_running_tasks/test_long_running_tasks_context_manager.py @@ -8,7 +8,7 @@ from asgi_lifespan import LifespanManager from fastapi import APIRouter, Depends, FastAPI, status from httpx import AsyncClient -from pydantic import AnyHttpUrl, PositiveFloat, parse_obj_as +from pydantic import AnyHttpUrl, PositiveFloat, TypeAdapter from servicelib.fastapi.long_running_tasks._context_manager import _ProgressManager from servicelib.fastapi.long_running_tasks.client import ( Client, @@ -90,7 +90,7 @@ async def 
bg_task_app( @pytest.fixture def mock_task_id() -> TaskId: - return parse_obj_as(TaskId, "fake_task_id") + return TypeAdapter(TaskId).validate_python("fake_task_id") async def test_task_result( @@ -100,7 +100,7 @@ async def test_task_result( assert result.status_code == status.HTTP_200_OK, result.text task_id = result.json() - url = parse_obj_as(AnyHttpUrl, "http://backgroud.testserver.io") + url = TypeAdapter(AnyHttpUrl).validate_python("http://backgroud.testserver.io") client = Client(app=bg_task_app, async_client=async_client, base_url=url) async with periodic_task_result( client, @@ -120,7 +120,7 @@ async def test_task_result_times_out( assert result.status_code == status.HTTP_200_OK, result.text task_id = result.json() - url = parse_obj_as(AnyHttpUrl, "http://backgroud.testserver.io") + url = TypeAdapter(AnyHttpUrl).validate_python("http://backgroud.testserver.io") client = Client(app=bg_task_app, async_client=async_client, base_url=url) timeout = TASK_SLEEP_INTERVAL / 10 with pytest.raises(TaskClientTimeoutError) as exec_info: @@ -146,7 +146,7 @@ async def test_task_result_task_result_is_an_error( assert result.status_code == status.HTTP_200_OK, result.text task_id = result.json() - url = parse_obj_as(AnyHttpUrl, "http://backgroud.testserver.io") + url = TypeAdapter(AnyHttpUrl).validate_python("http://backgroud.testserver.io") client = Client(app=bg_task_app, async_client=async_client, base_url=url) with pytest.raises(TaskClientResultError) as exec_info: async with periodic_task_result( diff --git a/packages/service-library/tests/fastapi/test_docker_utils.py b/packages/service-library/tests/fastapi/test_docker_utils.py index 4db0db99bd0..ded21eb0f8b 100644 --- a/packages/service-library/tests/fastapi/test_docker_utils.py +++ b/packages/service-library/tests/fastapi/test_docker_utils.py @@ -12,7 +12,7 @@ from faker import Faker from models_library.docker import DockerGenericTag from models_library.progress_bar import ProgressReport -from pydantic import ByteSize, parse_obj_as +from pydantic import ByteSize, TypeAdapter from pytest_mock import MockerFixture from servicelib import progress_bar from servicelib.docker_utils import pull_image @@ -46,8 +46,7 @@ async def test_retrieve_image_layer_information( if "sha256" in service_tag: image_name = f"{service_repo}@{service_tag}" await remove_images_from_host([image_name]) - docker_image = parse_obj_as( - DockerGenericTag, + docker_image = TypeAdapter(DockerGenericTag).validate_python( f"{registry_settings.REGISTRY_URL}/{osparc_service['image']['name']}:{osparc_service['image']['tag']}", ) layer_information = await retrieve_image_layer_information( @@ -202,7 +201,7 @@ async def test_pull_image_without_layer_information( assert layer_information print(f"{image=} has {layer_information.layers_total_size=}") - fake_number_of_steps = parse_obj_as(ByteSize, "200MiB") + fake_number_of_steps = TypeAdapter(ByteSize).validate_python("200MiB") assert fake_number_of_steps > layer_information.layers_total_size async with progress_bar.ProgressBarData( num_steps=fake_number_of_steps, diff --git a/packages/service-library/tests/fastapi/test_http_client_thin.py b/packages/service-library/tests/fastapi/test_http_client_thin.py index f98de720c33..8ff5614db30 100644 --- a/packages/service-library/tests/fastapi/test_http_client_thin.py +++ b/packages/service-library/tests/fastapi/test_http_client_thin.py @@ -15,7 +15,7 @@ TransportError, codes, ) -from pydantic import AnyHttpUrl, parse_obj_as +from pydantic import AnyHttpUrl, TypeAdapter from respx import 
MockRouter from servicelib.fastapi.http_client_thin import ( BaseThinClient, @@ -77,7 +77,7 @@ async def thick_client(request_timeout: int) -> AsyncIterable[FakeThickClient]: @pytest.fixture def test_url() -> AnyHttpUrl: - return parse_obj_as(AnyHttpUrl, "http://missing-host:1111") + return TypeAdapter(AnyHttpUrl).validate_python("http://missing-host:1111") async def test_connection_error( diff --git a/packages/service-library/tests/test_archiving_utils.py b/packages/service-library/tests/test_archiving_utils.py index f6886ea509a..bb6f2b486c4 100644 --- a/packages/service-library/tests/test_archiving_utils.py +++ b/packages/service-library/tests/test_archiving_utils.py @@ -18,7 +18,7 @@ import pytest from faker import Faker -from pydantic import ByteSize, parse_obj_as +from pydantic import ByteSize, TypeAdapter from pytest_benchmark.plugin import BenchmarkFixture from servicelib import archiving_utils from servicelib.archiving_utils import ArchiveError, archive_dir, unarchive_dir @@ -566,7 +566,8 @@ async def _archive_dir_performance( @pytest.mark.skip(reason="manual testing") @pytest.mark.parametrize( - "compress, file_size, num_files", [(False, parse_obj_as(ByteSize, "1Mib"), 10000)] + "compress, file_size, num_files", + [(False, TypeAdapter(ByteSize).validate_python("1Mib"), 10000)], ) def test_archive_dir_performance( benchmark: BenchmarkFixture, From 8250affa1fab68d214aa3a0af0b0852107c390dc Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 23 Sep 2024 09:55:45 +0200 Subject: [PATCH 059/280] fix url type --- .../src/servicelib/fastapi/long_running_tasks/_client.py | 2 +- .../src/servicelib/long_running_tasks/_errors.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/service-library/src/servicelib/fastapi/long_running_tasks/_client.py b/packages/service-library/src/servicelib/fastapi/long_running_tasks/_client.py index 24fd85077d4..4e00cce29d7 100644 --- a/packages/service-library/src/servicelib/fastapi/long_running_tasks/_client.py +++ b/packages/service-library/src/servicelib/fastapi/long_running_tasks/_client.py @@ -140,7 +140,7 @@ async def get_task_status( ) -> TaskStatus: timeout = timeout or self._client_configuration.default_timeout result = await self._async_client.get( - self._get_url(f"/task/{task_id}"), + str(self._get_url(f"/task/{task_id}")), timeout=timeout, ) if result.status_code != status.HTTP_200_OK: diff --git a/packages/service-library/src/servicelib/long_running_tasks/_errors.py b/packages/service-library/src/servicelib/long_running_tasks/_errors.py index 73722f746ac..43fd5b8c6be 100644 --- a/packages/service-library/src/servicelib/long_running_tasks/_errors.py +++ b/packages/service-library/src/servicelib/long_running_tasks/_errors.py @@ -1,10 +1,10 @@ -from pydantic.errors import PydanticErrorMixin +from models_library.errors_classes import OsparcErrorMixin -class BaseLongRunningError(PydanticErrorMixin, Exception): +class BaseLongRunningError(OsparcErrorMixin, Exception): """base exception for this module""" - code: str = "long_running_task.base_long_running_error" + code: str = "long_running_task.base_long_running_error" # type: ignore[assignment] class TaskAlreadyRunningError(BaseLongRunningError): From 6c63ccf5ac00ebb445d3b3ed1d66fb6025117271 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 23 Sep 2024 10:02:54 +0200 Subject: [PATCH 060/280] fix url type --- .../src/servicelib/fastapi/long_running_tasks/_client.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git 
a/packages/service-library/src/servicelib/fastapi/long_running_tasks/_client.py b/packages/service-library/src/servicelib/fastapi/long_running_tasks/_client.py index 4e00cce29d7..38465d6487b 100644 --- a/packages/service-library/src/servicelib/fastapi/long_running_tasks/_client.py +++ b/packages/service-library/src/servicelib/fastapi/long_running_tasks/_client.py @@ -151,7 +151,7 @@ async def get_task_status( body=result.text, ) - return TaskStatus.parse_obj(result.json()) + return TaskStatus.model_validate(result.json()) @retry_on_http_errors async def get_task_result( @@ -159,7 +159,7 @@ async def get_task_result( ) -> Any | None: timeout = timeout or self._client_configuration.default_timeout result = await self._async_client.get( - self._get_url(f"/task/{task_id}/result"), + str(self._get_url(f"/task/{task_id}/result")), timeout=timeout, ) if result.status_code != status.HTTP_200_OK: @@ -170,7 +170,7 @@ async def get_task_result( body=result.text, ) - task_result = TaskResult.parse_obj(result.json()) + task_result = TaskResult.model_validate(result.json()) if task_result.error is not None: raise TaskClientResultError(message=task_result.error) return task_result.result @@ -181,7 +181,7 @@ async def cancel_and_delete_task( ) -> None: timeout = timeout or self._client_configuration.default_timeout result = await self._async_client.delete( - self._get_url(f"/task/{task_id}"), + str(self._get_url(f"/task/{task_id}")), timeout=timeout, ) From fd2f6cbace6a073e958dfb27e5497384f68dbc12 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 23 Sep 2024 10:20:05 +0200 Subject: [PATCH 061/280] fix rabbit settings --- .../src/servicelib/rabbitmq/_errors.py | 8 ++++---- .../src/settings_library/rabbit.py | 18 ++++++++++-------- 2 files changed, 14 insertions(+), 12 deletions(-) diff --git a/packages/service-library/src/servicelib/rabbitmq/_errors.py b/packages/service-library/src/servicelib/rabbitmq/_errors.py index 0e3efbf3a11..93dbe0cd051 100644 --- a/packages/service-library/src/servicelib/rabbitmq/_errors.py +++ b/packages/service-library/src/servicelib/rabbitmq/_errors.py @@ -1,21 +1,21 @@ from typing import Final -from pydantic.errors import PydanticErrorMixin +from models_library.errors_classes import OsparcErrorMixin _ERROR_PREFIX: Final[str] = "rabbitmq_error" -class BaseRPCError(PydanticErrorMixin, RuntimeError): +class BaseRPCError(OsparcErrorMixin, RuntimeError): ... class RPCNotInitializedError(BaseRPCError): - code = f"{_ERROR_PREFIX}.not_started" + code = f"{_ERROR_PREFIX}.not_started" # type: ignore[assignment] msg_template = "Please check that the RabbitMQ RPC backend was initialized!" class RemoteMethodNotRegisteredError(BaseRPCError): - code = f"{_ERROR_PREFIX}.remote_not_registered" + code = f"{_ERROR_PREFIX}.remote_not_registered" # type: ignore[assignment] msg_template = ( "Could not find a remote method named: '{method_name}'. " "Message from remote server was returned: {incoming_message}. 
" diff --git a/packages/settings-library/src/settings_library/rabbit.py b/packages/settings-library/src/settings_library/rabbit.py index 19c6af0b656..2b339755e78 100644 --- a/packages/settings-library/src/settings_library/rabbit.py +++ b/packages/settings-library/src/settings_library/rabbit.py @@ -1,6 +1,6 @@ from functools import cached_property -from pydantic import parse_obj_as +from pydantic import TypeAdapter from pydantic.networks import AnyUrl from pydantic.types import SecretStr @@ -15,7 +15,7 @@ class RabbitDsn(AnyUrl): class RabbitSettings(BaseCustomSettings): # host RABBIT_HOST: str - RABBIT_PORT: PortInt = parse_obj_as(PortInt, 5672) + RABBIT_PORT: PortInt = TypeAdapter(PortInt).validate_python(5672) RABBIT_SECURE: bool # auth @@ -24,11 +24,13 @@ class RabbitSettings(BaseCustomSettings): @cached_property def dsn(self) -> str: - rabbit_dsn: str = RabbitDsn.build( - scheme="amqps" if self.RABBIT_SECURE else "amqp", - user=self.RABBIT_USER, - password=self.RABBIT_PASSWORD.get_secret_value(), - host=self.RABBIT_HOST, - port=f"{self.RABBIT_PORT}", + rabbit_dsn: str = str( + RabbitDsn.build( + scheme="amqps" if self.RABBIT_SECURE else "amqp", + username=self.RABBIT_USER, + password=self.RABBIT_PASSWORD.get_secret_value(), + host=self.RABBIT_HOST, + port=self.RABBIT_PORT, + ) ) return rabbit_dsn From 7a888f511177e1b9c1572f5a5dc1747dabe57bfe Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 23 Sep 2024 10:26:11 +0200 Subject: [PATCH 062/280] upgrade deprecated method --- .../services_api_mocks_for_aiohttp_clients.py | 10 +++++----- packages/settings-library/tests/test_twilio.py | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/packages/pytest-simcore/src/pytest_simcore/services_api_mocks_for_aiohttp_clients.py b/packages/pytest-simcore/src/pytest_simcore/services_api_mocks_for_aiohttp_clients.py index 8c0f0128281..f87b2e3b749 100644 --- a/packages/pytest-simcore/src/pytest_simcore/services_api_mocks_for_aiohttp_clients.py +++ b/packages/pytest-simcore/src/pytest_simcore/services_api_mocks_for_aiohttp_clients.py @@ -154,11 +154,11 @@ def get_computation_cb(url, **kwargs) -> CallbackResult: def create_cluster_cb(url, **kwargs) -> CallbackResult: assert "json" in kwargs, f"missing body in call to {url}" assert url.query.get("user_id") - random_cluster = Cluster.parse_obj( + random_cluster = Cluster.model_validate( random.choice(Cluster.model_config["json_schema_extra"]["examples"]) ) return CallbackResult( - status=201, payload=json.loads(random_cluster.json(by_alias=True)) + status=201, payload=json.loads(random_cluster.model_dump_json(by_alias=True)) ) @@ -173,7 +173,7 @@ def list_clusters_cb(url, **kwargs) -> CallbackResult: random.choice( Cluster.model_config["json_schema_extra"]["examples"] ) - ).json(by_alias=True) + ).model_dump_json(by_alias=True) ) for _ in range(3) ] @@ -194,7 +194,7 @@ def get_cluster_cb(url, **kwargs) -> CallbackResult: ), **{"id": cluster_id}, } - ).json(by_alias=True) + ).model_dump_json(by_alias=True) ), ) @@ -225,7 +225,7 @@ def patch_cluster_cb(url, **kwargs) -> CallbackResult: ), **{"id": cluster_id}, } - ).json(by_alias=True) + ).model_dump_json(by_alias=True) ), ) diff --git a/packages/settings-library/tests/test_twilio.py b/packages/settings-library/tests/test_twilio.py index 6f2830ea4aa..1989fbe6a9f 100644 --- a/packages/settings-library/tests/test_twilio.py +++ b/packages/settings-library/tests/test_twilio.py @@ -20,7 +20,7 @@ def test_twilio_settings_within_envdevel( }, ) settings = TwilioSettings.create_from_envs() - 
print(settings.json(indent=2)) + print(settings.model_dump_json(indent=2)) assert settings From b43955ce62cc40381d630fc6a6512abb6f0d436a Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 23 Sep 2024 10:55:45 +0200 Subject: [PATCH 063/280] fix dns* creation --- .../src/settings_library/postgres.py | 47 ++++++++++++------- .../src/settings_library/redis.py | 10 ++-- 2 files changed, 34 insertions(+), 23 deletions(-) diff --git a/packages/settings-library/src/settings_library/postgres.py b/packages/settings-library/src/settings_library/postgres.py index c79ec1231fa..c15740f068b 100644 --- a/packages/settings-library/src/settings_library/postgres.py +++ b/packages/settings-library/src/settings_library/postgres.py @@ -1,7 +1,14 @@ import urllib.parse from functools import cached_property -from pydantic import AliasChoices, Field, PostgresDsn, SecretStr, field_validator +from pydantic import ( + AliasChoices, + Field, + PostgresDsn, + SecretStr, + ValidationInfo, + field_validator, +) from pydantic_settings import SettingsConfigDict from .base import BaseCustomSettings @@ -41,33 +48,37 @@ class PostgresSettings(BaseCustomSettings): @field_validator("POSTGRES_MAXSIZE") @classmethod - def _check_size(cls, v, values): - if not (values["POSTGRES_MINSIZE"] <= v): - msg = f"assert POSTGRES_MINSIZE={values['POSTGRES_MINSIZE']} <= POSTGRES_MAXSIZE={v}" + def _check_size(cls, v, info: ValidationInfo): + if not (info.data["POSTGRES_MINSIZE"] <= v): + msg = f"assert POSTGRES_MINSIZE={info.data['POSTGRES_MINSIZE']} <= POSTGRES_MAXSIZE={v}" raise ValueError(msg) return v @cached_property def dsn(self) -> str: - dsn: str = PostgresDsn.build( - scheme="postgresql", - user=self.POSTGRES_USER, - password=self.POSTGRES_PASSWORD.get_secret_value(), - host=self.POSTGRES_HOST, - port=f"{self.POSTGRES_PORT}", - path=f"/{self.POSTGRES_DB}", + dsn: str = str( + PostgresDsn.build( # pylint: disable=no-member + scheme="postgresql", + username=self.POSTGRES_USER, + password=self.POSTGRES_PASSWORD.get_secret_value(), + host=self.POSTGRES_HOST, + port=self.POSTGRES_PORT, + path=f"/{self.POSTGRES_DB}", + ) ) return dsn @cached_property def dsn_with_async_sqlalchemy(self) -> str: - dsn: str = PostgresDsn.build( - scheme="postgresql+asyncpg", - user=self.POSTGRES_USER, - password=self.POSTGRES_PASSWORD.get_secret_value(), - host=self.POSTGRES_HOST, - port=f"{self.POSTGRES_PORT}", - path=f"/{self.POSTGRES_DB}", + dsn: str = str( + PostgresDsn.build( # pylint: disable=no-member + scheme="postgresql+asyncpg", + username=self.POSTGRES_USER, + password=self.POSTGRES_PASSWORD.get_secret_value(), + host=self.POSTGRES_HOST, + port=self.POSTGRES_PORT, + path=f"/{self.POSTGRES_DB}", + ) ) return dsn diff --git a/packages/settings-library/src/settings_library/redis.py b/packages/settings-library/src/settings_library/redis.py index ecccad69c10..f5daf20d113 100644 --- a/packages/settings-library/src/settings_library/redis.py +++ b/packages/settings-library/src/settings_library/redis.py @@ -1,6 +1,6 @@ from enum import Enum -from pydantic import parse_obj_as +from pydantic import TypeAdapter from pydantic.networks import RedisDsn from pydantic.types import SecretStr @@ -22,20 +22,20 @@ class RedisDatabase(int, Enum): class RedisSettings(BaseCustomSettings): # host REDIS_HOST: str = "redis" - REDIS_PORT: PortInt = parse_obj_as(PortInt, 6789) + REDIS_PORT: PortInt = TypeAdapter(PortInt).validate_python(6789) # auth REDIS_USER: str | None = None REDIS_PASSWORD: SecretStr | None = None def build_redis_dsn(self, db_index: RedisDatabase): 
- return RedisDsn.build( + return RedisDsn.build( # pylint: disable=no-member scheme="redis", - user=self.REDIS_USER or None, + username=self.REDIS_USER or None, password=( self.REDIS_PASSWORD.get_secret_value() if self.REDIS_PASSWORD else None ), host=self.REDIS_HOST, - port=f"{self.REDIS_PORT}", + port=self.REDIS_PORT, path=f"/{db_index}", ) From a1a7e24284275fd01e0d28de86d439930505a33a Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 23 Sep 2024 11:08:19 +0200 Subject: [PATCH 064/280] return redis dns as str --- .../src/settings_library/redis.py | 22 ++++++++++--------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/packages/settings-library/src/settings_library/redis.py b/packages/settings-library/src/settings_library/redis.py index f5daf20d113..6f9d7ad9d38 100644 --- a/packages/settings-library/src/settings_library/redis.py +++ b/packages/settings-library/src/settings_library/redis.py @@ -28,14 +28,16 @@ class RedisSettings(BaseCustomSettings): REDIS_USER: str | None = None REDIS_PASSWORD: SecretStr | None = None - def build_redis_dsn(self, db_index: RedisDatabase): - return RedisDsn.build( # pylint: disable=no-member - scheme="redis", - username=self.REDIS_USER or None, - password=( - self.REDIS_PASSWORD.get_secret_value() if self.REDIS_PASSWORD else None - ), - host=self.REDIS_HOST, - port=self.REDIS_PORT, - path=f"/{db_index}", + def build_redis_dsn(self, db_index: RedisDatabase) -> str: + return str( + RedisDsn.build( # pylint: disable=no-member + scheme="redis", + username=self.REDIS_USER or None, + password=( + self.REDIS_PASSWORD.get_secret_value() if self.REDIS_PASSWORD else None + ), + host=self.REDIS_HOST, + port=self.REDIS_PORT, + path=f"/{db_index}", + ) ) From d74f38006fa1f5b04a9aa36e3052c3837d3936fe Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 23 Sep 2024 11:17:20 +0200 Subject: [PATCH 065/280] upgrade pydantic dependencies --- packages/simcore-sdk/requirements/_base.txt | 18 ++++++++++++++++-- packages/simcore-sdk/requirements/_test.txt | 8 +++++--- 2 files changed, 21 insertions(+), 5 deletions(-) diff --git a/packages/simcore-sdk/requirements/_base.txt b/packages/simcore-sdk/requirements/_base.txt index 7d46fa1bcc9..28d31ff4fb2 100644 --- a/packages/simcore-sdk/requirements/_base.txt +++ b/packages/simcore-sdk/requirements/_base.txt @@ -127,7 +127,7 @@ psycopg2-binary==2.9.9 # via # aiopg # sqlalchemy -pydantic==2.9.1 +pydantic==2.9.2 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt @@ -144,14 +144,28 @@ pydantic==2.9.1 # -r requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/_base.in # fast-depends -pydantic-core==2.23.3 + # pydantic-extra-types + # pydantic-settings +pydantic-core==2.23.4 # via pydantic +pydantic-extra-types==2.9.0 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in +pydantic-settings==2.5.2 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r 
requirements/../../../packages/settings-library/requirements/_base.in pygments==2.18.0 # via rich pyinstrument==4.7.2 # via -r requirements/../../../packages/service-library/requirements/_base.in python-dateutil==2.9.0.post0 # via arrow +python-dotenv==1.0.1 + # via pydantic-settings pyyaml==6.0.2 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt diff --git a/packages/simcore-sdk/requirements/_test.txt b/packages/simcore-sdk/requirements/_test.txt index 6a6b2d8eea7..fa838e38766 100644 --- a/packages/simcore-sdk/requirements/_test.txt +++ b/packages/simcore-sdk/requirements/_test.txt @@ -203,12 +203,12 @@ py-partiql-parser==0.5.5 # via moto pycparser==2.22 # via cffi -pydantic==2.9.1 +pydantic==2.9.2 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # aws-sam-translator -pydantic-core==2.23.3 +pydantic-core==2.23.4 # via # -c requirements/_base.txt # pydantic @@ -249,7 +249,9 @@ python-dateutil==2.9.0.post0 # faker # moto python-dotenv==1.0.1 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in pyyaml==6.0.2 # via # -c requirements/../../../requirements/constraints.txt From 40e34e57cbe296accecfdfa52546dc5be3c93b8a Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 23 Sep 2024 11:36:28 +0200 Subject: [PATCH 066/280] continue upgrading --- .../servicelib/fastapi/http_client_thin.py | 4 +- .../tests/aiohttp/test_requests_validation.py | 38 ++++++++++--------- .../tests/fastapi/test_http_client_thin.py | 13 ++++--- .../src/settings_library/utils_encoders.py | 2 +- 4 files changed, 30 insertions(+), 27 deletions(-) diff --git a/packages/service-library/src/servicelib/fastapi/http_client_thin.py b/packages/service-library/src/servicelib/fastapi/http_client_thin.py index e00e0d636a2..55daaef69ba 100644 --- a/packages/service-library/src/servicelib/fastapi/http_client_thin.py +++ b/packages/service-library/src/servicelib/fastapi/http_client_thin.py @@ -7,7 +7,7 @@ from httpx import AsyncClient, ConnectError, HTTPError, PoolTimeout, Response from httpx._types import TimeoutTypes, URLTypes -from pydantic.errors import PydanticErrorMixin +from models_library.errors_classes import OsparcErrorMixin from tenacity import RetryCallState from tenacity.asyncio import AsyncRetrying from tenacity.before_sleep import before_sleep_log @@ -30,7 +30,7 @@ """ -class BaseClientError(PydanticErrorMixin, Exception): +class BaseClientError(OsparcErrorMixin, Exception): """Used as based for all the raised errors""" msg_template: str = "{message}" diff --git a/packages/service-library/tests/aiohttp/test_requests_validation.py b/packages/service-library/tests/aiohttp/test_requests_validation.py index 87cbf3f4d2a..a5f4d875946 100644 --- a/packages/service-library/tests/aiohttp/test_requests_validation.py +++ b/packages/service-library/tests/aiohttp/test_requests_validation.py @@ -111,7 +111,9 @@ def client(event_loop, aiohttp_client: Callable, faker: Faker) -> TestClient: async def _handler(request: web.Request) -> web.Response: # --------- UNDER TEST ------- # NOTE: app context does NOT need to be validated everytime! 
- context = MyRequestContext.parse_obj({**dict(request.app), **dict(request)}) + context = MyRequestContext.model_validate( + {**dict(request.app), **dict(request)} + ) path_params = parse_request_path_parameters_as( MyRequestPathParams, request, use_enveloped_error_v1=False @@ -129,11 +131,11 @@ async def _handler(request: web.Request) -> web.Response: return web.json_response( { - "parameters": path_params.dict(), - "queries": query_params.dict(), - "body": body.dict(), - "context": context.dict(), - "headers": headers_params.dict(), + "parameters": path_params.model_dump(), + "queries": query_params.model_dump(), + "body": body.model_dump(), + "context": context.model_dump(), + "headers": headers_params.model_dump(), }, dumps=json_dumps, ) @@ -221,8 +223,8 @@ async def test_parse_request_with_invalid_path_params( r = await client.get( "/projects/invalid-uuid", params=query_params.as_params(), - json=body.dict(), - headers=headers_params.dict(by_alias=True), + json=body.model_dump(), + headers=headers_params.model_dump(by_alias=True), ) assert r.status == status.HTTP_422_UNPROCESSABLE_ENTITY, f"{await r.text()}" @@ -234,8 +236,8 @@ async def test_parse_request_with_invalid_path_params( "details": [ { "loc": "project_uuid", - "msg": "value is not a valid uuid", - "type": "type_error.uuid", + "msg": "Input should be a valid UUID, invalid character: expected an optional prefix of `urn:uuid:` followed by [0-9a-fA-F-], found `i` at 1", + "type": "uuid_parsing", } ], } @@ -265,8 +267,8 @@ async def test_parse_request_with_invalid_query_params( "details": [ { "loc": "label", - "msg": "field required", - "type": "value_error.missing", + "msg": "Field required", + "type": "missing", } ], } @@ -298,13 +300,13 @@ async def test_parse_request_with_invalid_body( "details": [ { "loc": "x", - "msg": "field required", - "type": "value_error.missing", + "msg": "Field required", + "type": "missing", }, { "loc": "z", - "msg": "field required", - "type": "value_error.missing", + "msg": "Field required", + "type": "missing", }, ], } @@ -353,8 +355,8 @@ async def test_parse_request_with_invalid_headers_params( "details": [ { "loc": "X-Simcore-User-Agent", - "msg": "field required", - "type": "value_error.missing", + "msg": "Field required", + "type": "missing", } ], } diff --git a/packages/service-library/tests/fastapi/test_http_client_thin.py b/packages/service-library/tests/fastapi/test_http_client_thin.py index 8ff5614db30..dfe11467518 100644 --- a/packages/service-library/tests/fastapi/test_http_client_thin.py +++ b/packages/service-library/tests/fastapi/test_http_client_thin.py @@ -76,12 +76,13 @@ async def thick_client(request_timeout: int) -> AsyncIterable[FakeThickClient]: @pytest.fixture -def test_url() -> AnyHttpUrl: - return TypeAdapter(AnyHttpUrl).validate_python("http://missing-host:1111") +def test_url() -> str: + return str(TypeAdapter(AnyHttpUrl).validate_python("http://missing-host:1111")) async def test_connection_error( - thick_client: FakeThickClient, test_url: AnyHttpUrl + thick_client: FakeThickClient, + test_url: str, ) -> None: with pytest.raises(ClientHttpError) as exe_info: await thick_client.get_provided_url(test_url) @@ -92,7 +93,7 @@ async def test_connection_error( async def test_retry_on_errors( request_timeout: int, - test_url: AnyHttpUrl, + test_url: str, caplog_info_level: pytest.LogCaptureFixture, ) -> None: client = FakeThickClient(total_retry_interval=request_timeout) @@ -108,7 +109,7 @@ async def test_retry_on_errors_by_error_type( error_class: type[RequestError], 
caplog_info_level: pytest.LogCaptureFixture, request_timeout: int, - test_url: AnyHttpUrl, + test_url: str, ) -> None: class ATestClient(BaseThinClient): # pylint: disable=no-self-use @@ -177,7 +178,7 @@ async def public_method_no_annotation(self): async def test_expect_state_decorator( - test_url: AnyHttpUrl, + test_url: str, respx_mock: MockRouter, request_timeout: int, ) -> None: diff --git a/packages/settings-library/src/settings_library/utils_encoders.py b/packages/settings-library/src/settings_library/utils_encoders.py index 71ea960bf78..f38e156b6a5 100644 --- a/packages/settings-library/src/settings_library/utils_encoders.py +++ b/packages/settings-library/src/settings_library/utils_encoders.py @@ -12,7 +12,7 @@ def create_json_encoder_wo_secrets(model_cls: type[BaseModel]): show_secrets_encoder = create_json_encoder_wo_secrets(type(model)) model.dict(encoder=show_secrets_encoder)['my_secret'] == "secret" """ - current_encoders = getattr(model_cls.Config, "json_encoders", {}) + current_encoders = getattr(model_cls.model_config, "json_encoders", {}) return partial( custom_pydantic_encoder, { From 2843a12cef1bc1f61a581610576492dd0bbec802 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 23 Sep 2024 11:52:56 +0200 Subject: [PATCH 067/280] continue upgrading --- .../node_ports_common/_filemanager.py | 18 +++++++++--------- .../node_ports_common/aws_s3_cli.py | 5 +++-- .../node_ports_common/file_io_utils.py | 4 ++-- .../node_ports_common/filemanager.py | 8 +++++--- .../simcore_sdk/node_ports_common/r_clone.py | 7 ++++--- .../node_ports_common/r_clone_utils.py | 5 ++--- 6 files changed, 25 insertions(+), 22 deletions(-) diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/_filemanager.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/_filemanager.py index 7b5467c2851..a5305dd5b93 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/_filemanager.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/_filemanager.py @@ -14,7 +14,7 @@ from models_library.projects_nodes_io import LocationID, LocationName from models_library.users import UserID from models_library.utils.fastapi_encoders import jsonable_encoder -from pydantic import AnyUrl, parse_obj_as +from pydantic import AnyUrl, TypeAdapter from settings_library.node_ports import NodePortsSettings from tenacity.asyncio import AsyncRetrying from tenacity.before_sleep import before_sleep_log @@ -42,7 +42,7 @@ async def _get_location_id_from_location_name( raise exceptions.S3InvalidStore(store) -def _get_https_link_if_storage_secure(url: AnyUrl) -> str: +def _get_https_link_if_storage_secure(url: str) -> str: # NOTE: links generated by storage are http only. 
# WEBSERVER -> STORAGE (http requests) # DY-SIDECAR (simcore-sdk) -> STORAGE (httpS requests) @@ -69,18 +69,18 @@ async def _complete_upload( :rtype: ETag """ async with session.post( - _get_https_link_if_storage_secure(upload_completion_link), + _get_https_link_if_storage_secure(str(upload_completion_link)), json=jsonable_encoder(FileUploadCompletionBody(parts=parts)), auth=get_basic_auth(), ) as resp: resp.raise_for_status() # now poll for state - file_upload_complete_response = parse_obj_as( - Envelope[FileUploadCompleteResponse], await resp.json() + file_upload_complete_response = TypeAdapter(Envelope[FileUploadCompleteResponse]).validate_python( + await resp.json() ) assert file_upload_complete_response.data # nosec state_url = _get_https_link_if_storage_secure( - file_upload_complete_response.data.links.state + str(file_upload_complete_response.data.links.state) ) _logger.info("completed upload of %s", f"{len(parts)} parts, received {state_url}") @@ -96,8 +96,8 @@ async def _complete_upload( with attempt: async with session.post(state_url, auth=get_basic_auth()) as resp: resp.raise_for_status() - future_enveloped = parse_obj_as( - Envelope[FileUploadCompleteFutureResponse], await resp.json() + future_enveloped = TypeAdapter(Envelope[FileUploadCompleteFutureResponse]).validate_python( + await resp.json() ) assert future_enveloped.data # nosec if future_enveloped.data.state == FileUploadCompleteState.NOK: @@ -142,7 +142,7 @@ async def _abort_upload( # abort the upload correctly, so it can revert back to last version try: async with session.post( - _get_https_link_if_storage_secure(abort_upload_link), auth=get_basic_auth() + _get_https_link_if_storage_secure(str(abort_upload_link)), auth=get_basic_auth() ) as resp: resp.raise_for_status() except ClientError: diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/aws_s3_cli.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/aws_s3_cli.py index 35d1d7c71f8..87264ced3da 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/aws_s3_cli.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/aws_s3_cli.py @@ -6,10 +6,11 @@ from asyncio.streams import StreamReader from pathlib import Path +from models_library.errors_classes import OsparcErrorMixin + from aiocache import cached # type: ignore[import-untyped] from models_library.basic_types import IDStr from pydantic import AnyUrl, ByteSize -from pydantic.errors import PydanticErrorMixin from servicelib.progress_bar import ProgressBarData from servicelib.utils import logged_gather from settings_library.aws_s3_cli import AwsS3CliSettings @@ -24,7 +25,7 @@ _OSPARC_SYMLINK_EXTENSION = ".rclonelink" # named `rclonelink` to maintain backwards -class BaseAwsS3CliError(PydanticErrorMixin, RuntimeError): +class BaseAwsS3CliError(OsparcErrorMixin, RuntimeError): ... 
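Note on the recurring error-class change in these hunks: pydantic 2 drops PydanticErrorMixin, so the exceptions are rebased onto the in-repo OsparcErrorMixin from models_library.errors_classes. A minimal sketch of how such an error is declared and raised, assuming OsparcErrorMixin keeps the v1 mixin's msg_template / keyword-argument behaviour (VolumeNotFoundError below is a made-up illustration, not part of this patch):

    from models_library.errors_classes import OsparcErrorMixin

    class VolumeNotFoundError(OsparcErrorMixin, RuntimeError):
        # context passed at raise time is interpolated into msg_template
        msg_template = "volume '{volume_name}' was not found"

    try:
        raise VolumeNotFoundError(volume_name="outputs")
    except VolumeNotFoundError as err:
        assert "outputs" in f"{err}"
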
diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/file_io_utils.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/file_io_utils.py index 695b710c8f8..5feefab82f8 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/file_io_utils.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/file_io_utils.py @@ -253,7 +253,7 @@ def _check_for_aws_http_errors(exc: BaseException) -> bool: async def _session_put( session: ClientSession, file_part_size: int, - upload_url: AnyUrl, + upload_url: str, pbar: tqdm, io_log_redirect_cb: LogRedirectCB | None, progress_bar: ProgressBarData, @@ -314,7 +314,7 @@ async def _upload_file_part( received_e_tag = await _session_put( session=session, file_part_size=file_part_size, - upload_url=upload_url, + upload_url=str(upload_url), pbar=pbar, io_log_redirect_cb=io_log_redirect_cb, progress_bar=progress_bar, diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/filemanager.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/filemanager.py index 6a5609c7eb5..f3e2587fab7 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/filemanager.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/filemanager.py @@ -15,7 +15,7 @@ from models_library.basic_types import IDStr, SHA256Str from models_library.projects_nodes_io import LocationID, LocationName, StorageFileID from models_library.users import UserID -from pydantic import AnyUrl, ByteSize, parse_obj_as +from pydantic import AnyUrl, ByteSize, TypeAdapter, parse_obj_as from servicelib.file_utils import create_sha256_checksum from servicelib.progress_bar import ProgressBarData from settings_library.aws_s3_cli import AwsS3CliSettings @@ -189,14 +189,16 @@ async def download_path_from_s3( aws_s3_cli_settings, progress_bar, local_directory_path=local_path, - download_s3_link=parse_obj_as(AnyUrl, f"{download_link}"), + download_s3_link=TypeAdapter(AnyUrl).validate_python(f"{download_link}"), ) elif r_clone_settings: await r_clone.sync_s3_to_local( r_clone_settings, progress_bar, local_directory_path=local_path, - download_s3_link=parse_obj_as(AnyUrl, f"{download_link}"), + download_s3_link=str( + TypeAdapter(AnyUrl).validate_python(f"{download_link}") + ), ) else: msg = "Unexpected configuration" diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone.py index 18e15139493..90727e0dd21 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone.py @@ -8,11 +8,12 @@ from pathlib import Path from typing import Final +from models_library.errors_classes import OsparcErrorMixin + from aiocache import cached # type: ignore[import-untyped] from aiofiles import tempfile from models_library.basic_types import IDStr from pydantic import AnyUrl, BaseModel, ByteSize -from pydantic.errors import PydanticErrorMixin from servicelib.progress_bar import ProgressBarData from servicelib.utils import logged_gather from settings_library.r_clone import RCloneSettings @@ -31,7 +32,7 @@ _logger = logging.getLogger(__name__) -class BaseRCloneError(PydanticErrorMixin, RuntimeError): +class BaseRCloneError(OsparcErrorMixin, RuntimeError): ... 
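Aside: most of the mechanical edits in this patch series follow the same pydantic 2 recipe of replacing the removed parse_obj_as / parse_raw_as helpers with TypeAdapter, and wrapping v2 URL objects in str() where call sites expect a plain string. A small self-contained sketch using only public pydantic 2 API (the example values are illustrative only):

    from pydantic import AnyUrl, ByteSize, TypeAdapter

    # v1: parse_obj_as(ByteSize, "200MiB")
    size = TypeAdapter(ByteSize).validate_python("200MiB")

    # v1: parse_obj_as(AnyUrl, "http://missing-host:1111")
    url = str(TypeAdapter(AnyUrl).validate_python("http://missing-host:1111"))

    assert size > 0 and url.startswith("http://")
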
@@ -279,7 +280,7 @@ async def sync_s3_to_local( progress_bar: ProgressBarData, *, local_directory_path: Path, - download_s3_link: AnyUrl, + download_s3_link: str, exclude_patterns: set[str] | None = None, debug_logs: bool = False, ) -> None: diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone_utils.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone_utils.py index 0a059c8e5ff..a88dfaf203d 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone_utils.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone_utils.py @@ -3,7 +3,7 @@ from typing import Union from models_library.utils.change_case import snake_to_camel -from pydantic import BaseModel, ByteSize, ConfigDict, Field, parse_raw_as +from pydantic import BaseModel, ByteSize, ConfigDict, Field, TypeAdapter from servicelib.logging_utils import log_catch from servicelib.progress_bar import ProgressBarData @@ -75,8 +75,7 @@ def __init__(self, progress_bar: ProgressBarData) -> None: async def __call__(self, logs: str) -> None: _logger.debug("received logs: %s", logs) with log_catch(_logger, reraise=False): - rclone_message: _RCloneSyncMessages = parse_raw_as( - _RCloneSyncMessages, # type: ignore[arg-type] + rclone_message: _RCloneSyncMessages = TypeAdapter(_RCloneSyncMessages).validate_strings( logs, ) From c82afa54b63355a176cbc87cd9efe07cf7bf54f4 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 23 Sep 2024 12:19:44 +0200 Subject: [PATCH 068/280] fix anyhttpurl validation --- .../aiohttp/long_running_tasks/_server.py | 17 +++++++---------- 1 file changed, 7 insertions(+), 10 deletions(-) diff --git a/packages/service-library/src/servicelib/aiohttp/long_running_tasks/_server.py b/packages/service-library/src/servicelib/aiohttp/long_running_tasks/_server.py index df81371cbb8..d94914fcf91 100644 --- a/packages/service-library/src/servicelib/aiohttp/long_running_tasks/_server.py +++ b/packages/service-library/src/servicelib/aiohttp/long_running_tasks/_server.py @@ -6,7 +6,7 @@ from aiohttp import web from models_library.utils.json_serialization import json_dumps -from pydantic import AnyHttpUrl, PositiveFloat +from pydantic import AnyHttpUrl, PositiveFloat, TypeAdapter from ...aiohttp import status from ...long_running_tasks._models import TaskGet @@ -67,17 +67,14 @@ async def start_long_running_task( ip_addr, port = request_.transport.get_extra_info( "sockname" ) # https://docs.python.org/3/library/asyncio-protocol.html#asyncio.BaseTransport.get_extra_info - status_url = AnyHttpUrl( - url=f"http://{ip_addr}:{port}{request_.app.router['get_task_status'].url_for(task_id=task_id)}", - scheme="http", + status_url = TypeAdapter(AnyHttpUrl).validate_python( + f"http://{ip_addr}:{port}{request_.app.router['get_task_status'].url_for(task_id=task_id)}" ) - result_url = AnyHttpUrl( - url=f"http://{ip_addr}:{port}{request_.app.router['get_task_result'].url_for(task_id=task_id)}", - scheme="http", + result_url = TypeAdapter(AnyHttpUrl).validate_python( + f"http://{ip_addr}:{port}{request_.app.router['get_task_result'].url_for(task_id=task_id)}" ) - abort_url = AnyHttpUrl( - url=f"http://{ip_addr}:{port}{request_.app.router['cancel_and_delete_task'].url_for(task_id=task_id)}", - scheme="http", + abort_url = TypeAdapter(AnyHttpUrl).validate_python( + f"http://{ip_addr}:{port}{request_.app.router['cancel_and_delete_task'].url_for(task_id=task_id)}" ) task_get = TaskGet( task_id=task_id, From c96cd214fdebefb0d524706a6da862196bb3cf89 Mon Sep 17 00:00:00 2001 From: Giancarlo 
Romeo Date: Mon, 23 Sep 2024 12:32:25 +0200 Subject: [PATCH 069/280] continue upgrading --- .../src/servicelib/aiohttp/requests_validation.py | 2 +- packages/service-library/src/servicelib/background_task.py | 4 ++-- packages/service-library/src/servicelib/fastapi/errors.py | 4 ++-- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/service-library/src/servicelib/aiohttp/requests_validation.py b/packages/service-library/src/servicelib/aiohttp/requests_validation.py index e5ab75d4d6d..59d4b69ce30 100644 --- a/packages/service-library/src/servicelib/aiohttp/requests_validation.py +++ b/packages/service-library/src/servicelib/aiohttp/requests_validation.py @@ -228,4 +228,4 @@ async def parse_request_body_as( return model_schema_cls.model_validate(body) # type: ignore [return-value] # used for model_schema like 'list[T]' or 'dict[T]' - return TypeAdapter(model_schema_cls).validate_python(body) + return TypeAdapter(model_schema_cls).validate_python(body) # type: ignore[no-any-return] diff --git a/packages/service-library/src/servicelib/background_task.py b/packages/service-library/src/servicelib/background_task.py index e7a4c665c49..26f0dd25050 100644 --- a/packages/service-library/src/servicelib/background_task.py +++ b/packages/service-library/src/servicelib/background_task.py @@ -5,7 +5,7 @@ from collections.abc import AsyncIterator, Awaitable, Callable from typing import Final -from pydantic.errors import PydanticErrorMixin +from models_library.errors_classes import OsparcErrorMixin from tenacity import TryAgain from tenacity.asyncio import AsyncRetrying from tenacity.stop import stop_after_attempt @@ -21,7 +21,7 @@ _MAX_TASK_CANCELLATION_ATTEMPTS: Final[int] = 3 -class PeriodicTaskCancellationError(PydanticErrorMixin, Exception): +class PeriodicTaskCancellationError(OsparcErrorMixin, Exception): msg_template: str = "Could not cancel task '{task_name}'" diff --git a/packages/service-library/src/servicelib/fastapi/errors.py b/packages/service-library/src/servicelib/fastapi/errors.py index 9eebef84637..136bb7c2fd6 100644 --- a/packages/service-library/src/servicelib/fastapi/errors.py +++ b/packages/service-library/src/servicelib/fastapi/errors.py @@ -1,7 +1,7 @@ -from pydantic.errors import PydanticErrorMixin +from models_library.errors_classes import OsparcErrorMixin -class ApplicationRuntimeError(PydanticErrorMixin, RuntimeError): +class ApplicationRuntimeError(OsparcErrorMixin, RuntimeError): pass From dfe63b571d84c724347107a35631e6969adac7fc Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 23 Sep 2024 12:38:12 +0200 Subject: [PATCH 070/280] continue upgrading --- .../aiohttp/long_running_tasks/test_long_running_tasks.py | 4 ++-- .../tests/deferred_tasks/test_deferred_tasks.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks.py b/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks.py index 55ddea8046b..7907f092c24 100644 --- a/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks.py +++ b/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks.py @@ -75,12 +75,12 @@ async def test_workflow( data, error = await assert_status(result, status.HTTP_200_OK) assert data assert not error - task_status = long_running_tasks.server.TaskStatus.parse_obj(data) + task_status = long_running_tasks.server.TaskStatus.model_validate(data) assert task_status progress_updates.append( 
(task_status.task_progress.message, task_status.task_progress.percent) ) - print(f"<-- received task status: {task_status.json(indent=2)}") + print(f"<-- received task status: {task_status.model_dump_json(indent=2)}") assert task_status.done, "task incomplete" print( f"-- waiting for task status completed successfully: {json.dumps(attempt.retry_state.retry_object.statistics, indent=2)}" diff --git a/packages/service-library/tests/deferred_tasks/test_deferred_tasks.py b/packages/service-library/tests/deferred_tasks/test_deferred_tasks.py index 0ea55a62eee..ca77d8a1a6b 100644 --- a/packages/service-library/tests/deferred_tasks/test_deferred_tasks.py +++ b/packages/service-library/tests/deferred_tasks/test_deferred_tasks.py @@ -158,8 +158,8 @@ async def start(self) -> None: response = await _tcp_command( "init-context", { - "rabbit": self.rabbit_service.json(**_get_serialization_options()), - "redis": self.redis_service.json(**_get_serialization_options()), + "rabbit": self.rabbit_service.model_dump_json(**_get_serialization_options()), + "redis": self.redis_service.model_dump_json(**_get_serialization_options()), "max-workers": self.max_workers, }, port=self.remote_process.port, From 0df8f6041a30cbc850585320d9b8992fdc14d515 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 23 Sep 2024 12:51:37 +0200 Subject: [PATCH 071/280] add ignore --- .../src/servicelib/fastapi/long_running_tasks/_server.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/service-library/src/servicelib/fastapi/long_running_tasks/_server.py b/packages/service-library/src/servicelib/fastapi/long_running_tasks/_server.py index c5d7429f01a..e8306b6d187 100644 --- a/packages/service-library/src/servicelib/fastapi/long_running_tasks/_server.py +++ b/packages/service-library/src/servicelib/fastapi/long_running_tasks/_server.py @@ -50,4 +50,4 @@ async def on_shutdown() -> None: # add error handlers # NOTE: Exception handler can not be added during the on_startup script, otherwise not working correctly - app.add_exception_handler(BaseLongRunningError, base_long_running_error_handler) + app.add_exception_handler(BaseLongRunningError, base_long_running_error_handler) # type: ignore[arg-type] From 56c1c89676243edb5a1bd55916aad9c04ae332fb Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 23 Sep 2024 13:10:00 +0200 Subject: [PATCH 072/280] fix errors messages --- .../fastapi/long_running_tasks/test_long_running_tasks.py | 6 ++++-- .../service-library/tests/rabbitmq/test_rabbitmq_utils.py | 6 +++--- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/packages/service-library/tests/fastapi/long_running_tasks/test_long_running_tasks.py b/packages/service-library/tests/fastapi/long_running_tasks/test_long_running_tasks.py index bec8e3a6af2..75605862287 100644 --- a/packages/service-library/tests/fastapi/long_running_tasks/test_long_running_tasks.py +++ b/packages/service-library/tests/fastapi/long_running_tasks/test_long_running_tasks.py @@ -151,12 +151,14 @@ async def test_workflow( with attempt: result = await client.get(f"{status_url}") assert result.status_code == status.HTTP_200_OK - task_status = long_running_tasks.server.TaskStatus.parse_obj(result.json()) + task_status = long_running_tasks.server.TaskStatus.model_validate( + result.json() + ) assert task_status progress_updates.append( (task_status.task_progress.message, task_status.task_progress.percent) ) - print(f"<-- received task status: {task_status.json(indent=2)}") + print(f"<-- received task status: 
{task_status.model_dump_json(indent=2)}") assert task_status.done, "task incomplete" print( f"-- waiting for task status completed successfully: {json.dumps(attempt.retry_state.retry_object.statistics, indent=2)}" diff --git a/packages/service-library/tests/rabbitmq/test_rabbitmq_utils.py b/packages/service-library/tests/rabbitmq/test_rabbitmq_utils.py index b07f8e8cb8d..2615a92ac56 100644 --- a/packages/service-library/tests/rabbitmq/test_rabbitmq_utils.py +++ b/packages/service-library/tests/rabbitmq/test_rabbitmq_utils.py @@ -26,18 +26,18 @@ def test_rpc_namespace_sorts_elements(): def test_rpc_namespace_too_long(): with pytest.raises(ValidationError) as exec_info: RPCNamespace.from_entries({f"test{i}": f"test{i}" for i in range(20)}) - assert "ensure this value has at most 252 characters" in f"{exec_info.value}" + assert "String should have at most 252 characters" in f"{exec_info.value}" @pytest.mark.no_cleanup_check_rabbitmq_server_has_no_errors() # no rabbitmq instance running def test_rpc_namespace_too_short(): with pytest.raises(ValidationError) as exec_info: RPCNamespace.from_entries({}) - assert "ensure this value has at least 1 characters" in f"{exec_info.value}" + assert "String should have at least 1 character" in f"{exec_info.value}" @pytest.mark.no_cleanup_check_rabbitmq_server_has_no_errors() # no rabbitmq instance running def test_rpc_namespace_invalid_symbols(): with pytest.raises(ValidationError) as exec_info: RPCNamespace.from_entries({"test": "@"}) - assert "string does not match regex" in f"{exec_info.value}" + assert "String should match pattern" in f"{exec_info.value}" From 3b15198dd17c0f55bc764119bae7b4a900fd90e9 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 23 Sep 2024 13:53:10 +0200 Subject: [PATCH 073/280] continue upgrading --- .../service-library/tests/rabbitmq/test_rabbitmq_connection.py | 2 +- packages/service-library/tests/rabbitmq/test_rabbitmq_rpc.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/service-library/tests/rabbitmq/test_rabbitmq_connection.py b/packages/service-library/tests/rabbitmq/test_rabbitmq_connection.py index 3019b07d6ab..1c6357a637d 100644 --- a/packages/service-library/tests/rabbitmq/test_rabbitmq_connection.py +++ b/packages/service-library/tests/rabbitmq/test_rabbitmq_connection.py @@ -89,7 +89,7 @@ async def test_rabbit_client_with_paused_container( def _get_rabbitmq_api_params(rabbit_service: RabbitSettings) -> dict[str, str]: return { "scheme": "http", - "user": rabbit_service.RABBIT_USER, + "username": rabbit_service.RABBIT_USER, "password": rabbit_service.RABBIT_PASSWORD.get_secret_value(), "host": rabbit_service.RABBIT_HOST, "port": "15672", diff --git a/packages/service-library/tests/rabbitmq/test_rabbitmq_rpc.py b/packages/service-library/tests/rabbitmq/test_rabbitmq_rpc.py index e192afc611e..996e8e6dc4c 100644 --- a/packages/service-library/tests/rabbitmq/test_rabbitmq_rpc.py +++ b/packages/service-library/tests/rabbitmq/test_rabbitmq_rpc.py @@ -354,7 +354,7 @@ async def _a_handler() -> None: await rpc_server.register_handler( RPCNamespace("a"), RPCMethodName(handler_name), _a_handler ) - assert "ensure this value has at most 255 characters" in f"{exec_info.value}" + assert "String should have at most 252 characters" in f"{exec_info.value}" else: await rpc_server.register_handler( RPCNamespace("a"), RPCMethodName(handler_name), _a_handler From bb10a3d40f5fa84f6bb97461bda3e349cd2cfa13 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 23 Sep 2024 14:04:31 +0200 Subject: 
[PATCH 074/280] continue upgrading --- .../requirements/_base.txt | 4 ++-- .../src/service_integration/osparc_config.py | 19 ++++++++++--------- .../tests/test_osparc_config.py | 4 ++-- .../tests/test_osparc_runtime_specs.py | 10 +++++----- 4 files changed, 19 insertions(+), 18 deletions(-) diff --git a/packages/service-integration/requirements/_base.txt b/packages/service-integration/requirements/_base.txt index eb2fdda677a..bbd97171063 100644 --- a/packages/service-integration/requirements/_base.txt +++ b/packages/service-integration/requirements/_base.txt @@ -68,14 +68,14 @@ packaging==24.1 # via pytest pluggy==1.5.0 # via pytest -pydantic==2.9.1 +pydantic==2.9.2 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/models-library/requirements/_base.in # pydantic-extra-types # pydantic-settings -pydantic-core==2.23.3 +pydantic-core==2.23.4 # via pydantic pydantic-extra-types==2.9.0 # via -r requirements/../../../packages/models-library/requirements/_base.in diff --git a/packages/service-integration/src/service_integration/osparc_config.py b/packages/service-integration/src/service_integration/osparc_config.py index 975109a87e2..600252d825b 100644 --- a/packages/service-integration/src/service_integration/osparc_config.py +++ b/packages/service-integration/src/service_integration/osparc_config.py @@ -40,6 +40,7 @@ ConfigDict, NonNegativeInt, ValidationError, + ValidationInfo, field_validator, model_validator, ) @@ -71,7 +72,7 @@ class DockerComposeOverwriteConfig(ComposeSpecification): def create_default( cls, service_name: str | None = None ) -> "DockerComposeOverwriteConfig": - model: "DockerComposeOverwriteConfig" = cls.parse_obj( + model: "DockerComposeOverwriteConfig" = cls.model_validate( { "services": { service_name: { @@ -88,7 +89,7 @@ def create_default( def from_yaml(cls, path: Path) -> "DockerComposeOverwriteConfig": with path.open() as fh: data = yaml_safe_load(fh) - model: "DockerComposeOverwriteConfig" = cls.parse_obj(data) + model: "DockerComposeOverwriteConfig" = cls.model_validate(data) return model @@ -107,9 +108,9 @@ class MetadataConfig(ServiceMetaDataPublished): @field_validator("contact") @classmethod - def _check_contact_in_authors(cls, v, values): + def _check_contact_in_authors(cls, v, info: ValidationInfo): """catalog service relies on contact and author to define access rights""" - authors_emails = {author.email for author in values["authors"]} + authors_emails = {author.email for author in info.data["authors"]} if v not in authors_emails: msg = "Contact {v} must be registered as an author" raise ValueError(msg) @@ -119,7 +120,7 @@ def _check_contact_in_authors(cls, v, values): def from_yaml(cls, path: Path) -> "MetadataConfig": with path.open() as fh: data = yaml_safe_load(fh) - model: "MetadataConfig" = cls.parse_obj(data) + model: "MetadataConfig" = cls.model_validate(data) return model @classmethod @@ -127,7 +128,7 @@ def from_labels_annotations(cls, labels: dict[str, str]) -> "MetadataConfig": data = from_labels( labels, prefix_key=OSPARC_LABEL_PREFIXES[0], trim_key_head=False ) - model: "MetadataConfig" = cls.parse_obj(data) + model: "MetadataConfig" = cls.model_validate(data) return model def to_labels_annotations(self) -> dict[str, str]: @@ -189,8 +190,8 @@ def ensure_backwards_compatible_setting_type(cls, v): @field_validator("value", mode="before") @classmethod - def check_value_against_custom_types(cls, v, 
values): - if (type_ := values.get("type_")) and type_ == "ContainerSpec": + def check_value_against_custom_types(cls, v, info: ValidationInfo): + if (type_ := info.data.get("type_")) and type_ == "ContainerSpec": ContainerSpec.model_validate(v) return v @@ -233,7 +234,7 @@ def ensure_compatibility(cls, v): # NOTE: if changes are applied to `DynamicSidecarServiceLabels` # these are also validated when ooil runs. try: - ValidatingDynamicSidecarServiceLabels.parse_obj(v) + ValidatingDynamicSidecarServiceLabels.model_validate(v) except ValidationError: _logger.exception( "Could not validate %s via %s", diff --git a/packages/service-integration/tests/test_osparc_config.py b/packages/service-integration/tests/test_osparc_config.py index b258f2fc4c8..10b3e9d9bbc 100644 --- a/packages/service-integration/tests/test_osparc_config.py +++ b/packages/service-integration/tests/test_osparc_config.py @@ -82,7 +82,7 @@ def test_settings_item_in_sync_with_service_settings_label( # First we parse with SimcoreServiceSettingLabelEntry since it also supports backwards compatibility # and will upgrade old version - example_model = SimcoreServiceSettingLabelEntry.parse_obj(example_data) + example_model = SimcoreServiceSettingLabelEntry.model_validate(example_data) # SettingsItem is exclusively for NEW labels, so it should not support backwards compatibility new_model = SettingsItem( @@ -92,4 +92,4 @@ def test_settings_item_in_sync_with_service_settings_label( ) # check back - SimcoreServiceSettingLabelEntry.parse_obj(new_model.dict(by_alias=True)) + SimcoreServiceSettingLabelEntry.model_validate(new_model.dict(by_alias=True)) diff --git a/packages/service-integration/tests/test_osparc_runtime_specs.py b/packages/service-integration/tests/test_osparc_runtime_specs.py index 74d63e15e5b..153c85d27c4 100644 --- a/packages/service-integration/tests/test_osparc_runtime_specs.py +++ b/packages/service-integration/tests/test_osparc_runtime_specs.py @@ -17,8 +17,8 @@ def test_create_runtime_spec_impl(tests_data_dir: Path): osparc_spec: dict = yaml.safe_load((tests_data_dir / "runtime.yml").read_text()) - pm_spec1 = PathMappingsLabel.parse_obj(osparc_spec["paths-mapping"]) - pm_spec2 = PathMappingsLabel.parse_obj( + pm_spec1 = PathMappingsLabel.model_validate(osparc_spec["paths-mapping"]) + pm_spec2 = PathMappingsLabel.model_validate( { "outputs_path": "/outputs", "inputs_path": "/inputs", @@ -58,12 +58,12 @@ def test_create_runtime_spec_impl(tests_data_dir: Path): # FIXME: ensure all sources are different! (e.g. a/b/c and z/c have the same name!) 
- print(Service(volumes=volumes).json(exclude_unset=True, indent=2)) + print(Service(volumes=volumes).model_dump_json(exclude_unset=True, indent=2)) # TODO: _auto_map_to_service(osparc_spec["settings"]) data = {} for obj in osparc_spec["settings"]: - item = SettingsItem.parse_obj(obj) + item = SettingsItem.model_validate(obj) if item.name == "resources": # https://docs.docker.com/compose/compose-file/compose-file-v3/#resources @@ -87,7 +87,7 @@ def test_create_runtime_spec_impl(tests_data_dir: Path): else: raise AssertionError(item) - print(Service(**data).json(exclude_unset=True, indent=2)) + print(Service(**data).model_dump_json(exclude_unset=True, indent=2)) def test_compatibility(): From ec0404a7e3de5e0f4045895762fd06700f404736 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 23 Sep 2024 14:13:46 +0200 Subject: [PATCH 075/280] continue upgrading --- packages/service-integration/tests/test_osparc_config.py | 6 +++--- .../service-integration/tests/test_osparc_image_specs.py | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/service-integration/tests/test_osparc_config.py b/packages/service-integration/tests/test_osparc_config.py index 10b3e9d9bbc..f9c03ed93ec 100644 --- a/packages/service-integration/tests/test_osparc_config.py +++ b/packages/service-integration/tests/test_osparc_config.py @@ -52,8 +52,8 @@ def test_load_from_labels( runtime_cfg = RuntimeConfig.from_labels_annotations(labels) assert runtime_cfg.callbacks_mapping is not None - print(meta_cfg.json(exclude_unset=True, indent=2)) - print(runtime_cfg.json(exclude_unset=True, indent=2)) + print(meta_cfg.model_dump_json(exclude_unset=True, indent=2)) + print(runtime_cfg.model_dump_json(exclude_unset=True, indent=2)) # create yamls from config for model in (runtime_cfg, meta_cfg): @@ -62,7 +62,7 @@ def test_load_from_labels( ) with open(config_path, "w") as fh: data = json.loads( - model.json(exclude_unset=True, by_alias=True, exclude_none=True) + model.model_dump_json(exclude_unset=True, by_alias=True, exclude_none=True) ) yaml.safe_dump(data, fh, sort_keys=False) diff --git a/packages/service-integration/tests/test_osparc_image_specs.py b/packages/service-integration/tests/test_osparc_image_specs.py index b482bc85a4c..6bec87425ad 100644 --- a/packages/service-integration/tests/test_osparc_image_specs.py +++ b/packages/service-integration/tests/test_osparc_image_specs.py @@ -58,8 +58,8 @@ def test_create_image_spec_impl(tests_data_dir: Path, settings: AppSettings): assert build_spec assert isinstance(build_spec, BaseModel) - print(build_spec.json(exclude_unset=True, indent=2)) - print(yaml.safe_dump(compose_spec.dict(exclude_unset=True), sort_keys=False)) + print(build_spec.model_dump_json(exclude_unset=True, indent=2)) + print(yaml.safe_dump(compose_spec.model_dump(exclude_unset=True), sort_keys=False)) def test_image_digest_is_not_a_label_annotation(tests_data_dir: Path): From 96a0663862ef221fde1e626f146b01e24471a15e Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 23 Sep 2024 14:18:20 +0200 Subject: [PATCH 076/280] continue upgrading --- .../src/service_integration/osparc_image_specs.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/service-integration/src/service_integration/osparc_image_specs.py b/packages/service-integration/src/service_integration/osparc_image_specs.py index df97e7c18b1..ff4f36c3f7e 100644 --- a/packages/service-integration/src/service_integration/osparc_image_specs.py +++ 
b/packages/service-integration/src/service_integration/osparc_image_specs.py @@ -41,7 +41,7 @@ def create_image_spec( docker_compose_overwrite_cfg.services[service_name].build.labels = labels - overwrite_options = docker_compose_overwrite_cfg.services[service_name].build.dict( + overwrite_options = docker_compose_overwrite_cfg.services[service_name].build.model_dump( exclude_none=True ) build_spec = BuildItem(**overwrite_options) From ad7ddb5c0f3d11937aab628433d173a1a5263c70 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 23 Sep 2024 14:46:19 +0200 Subject: [PATCH 077/280] continue upgrading --- .../service-integration/src/service_integration/errors.py | 6 +++--- .../src/service_integration/osparc_config.py | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/service-integration/src/service_integration/errors.py b/packages/service-integration/src/service_integration/errors.py index 8d216b7d918..f1b5485092c 100644 --- a/packages/service-integration/src/service_integration/errors.py +++ b/packages/service-integration/src/service_integration/errors.py @@ -1,7 +1,7 @@ -from pydantic.errors import PydanticErrorMixin +from models_library.errors_classes import OsparcErrorMixin -class ServiceIntegrationError(PydanticErrorMixin, RuntimeError): +class ServiceIntegrationError(OsparcErrorMixin, RuntimeError): pass @@ -13,5 +13,5 @@ class UndefinedOciImageSpecError(ServiceIntegrationError): ... -class InvalidLabelsError(PydanticErrorMixin, ValueError): +class InvalidLabelsError(OsparcErrorMixin, ValueError): template_msg = "Invalid build labels {build_labels}" diff --git a/packages/service-integration/src/service_integration/osparc_config.py b/packages/service-integration/src/service_integration/osparc_config.py index 600252d825b..9382b98b447 100644 --- a/packages/service-integration/src/service_integration/osparc_config.py +++ b/packages/service-integration/src/service_integration/osparc_config.py @@ -133,7 +133,7 @@ def from_labels_annotations(cls, labels: dict[str, str]) -> "MetadataConfig": def to_labels_annotations(self) -> dict[str, str]: labels: dict[str, str] = to_labels( - self.dict(exclude_unset=True, by_alias=True, exclude_none=True), + self.model_dump(exclude_unset=True, by_alias=True, exclude_none=True), prefix_key=OSPARC_LABEL_PREFIXES[0], trim_key_head=False, ) From 6369b33b84eb1d7605a0a92cce156c8f0af99355 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 23 Sep 2024 14:58:46 +0200 Subject: [PATCH 078/280] fix test --- .../tests/rabbitmq/test_rabbitmq_connection.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/service-library/tests/rabbitmq/test_rabbitmq_connection.py b/packages/service-library/tests/rabbitmq/test_rabbitmq_connection.py index 1c6357a637d..ba7576e3027 100644 --- a/packages/service-library/tests/rabbitmq/test_rabbitmq_connection.py +++ b/packages/service-library/tests/rabbitmq/test_rabbitmq_connection.py @@ -86,13 +86,13 @@ async def test_rabbit_client_with_paused_container( await rabbit_client.publish(exchange_name, message) -def _get_rabbitmq_api_params(rabbit_service: RabbitSettings) -> dict[str, str]: +def _get_rabbitmq_api_params(rabbit_service: RabbitSettings) -> dict[str, Any]: return { "scheme": "http", "username": rabbit_service.RABBIT_USER, "password": rabbit_service.RABBIT_PASSWORD.get_secret_value(), "host": rabbit_service.RABBIT_HOST, - "port": "15672", + "port": 15672, } From 7d5537d429f76c10e4cb3453a04d366f90614b63 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 23 Sep 2024 
16:06:54 +0200 Subject: [PATCH 079/280] continue upgrading --- packages/service-integration/tests/test_command_compose.py | 2 +- .../settings-library/src/settings_library/utils_service.py | 4 ++-- packages/settings-library/tests/test_utils_service.py | 6 +++--- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/packages/service-integration/tests/test_command_compose.py b/packages/service-integration/tests/test_command_compose.py index 371d8a9dbdc..4333a28771f 100644 --- a/packages/service-integration/tests/test_command_compose.py +++ b/packages/service-integration/tests/test_command_compose.py @@ -39,7 +39,7 @@ def test_make_docker_compose_meta( assert target_compose_specs.exists() # valid compose specs - compose_cfg = ComposeSpecification.parse_obj( + compose_cfg = ComposeSpecification.model_validate( yaml.safe_load(target_compose_specs.read_text()) ) assert compose_cfg.services diff --git a/packages/settings-library/src/settings_library/utils_service.py b/packages/settings-library/src/settings_library/utils_service.py index e7bb66057c5..99571eef230 100644 --- a/packages/settings-library/src/settings_library/utils_service.py +++ b/packages/settings-library/src/settings_library/utils_service.py @@ -103,7 +103,7 @@ def _compose_url( else "http" ), "host": self._safe_getattr(f"{prefix}_HOST", URLPart.REQUIRED), - "user": self._safe_getattr(f"{prefix}_USER", user), + "username": self._safe_getattr(f"{prefix}_USER", user), "password": self._safe_getattr(f"{prefix}_PASSWORD", password), "port": self._safe_getattr(f"{prefix}_PORT", port), } @@ -125,7 +125,7 @@ def _compose_url( assert all(isinstance(v, str) or v is None for v in kwargs.values()) # nosec - composed_url: str = AnyUrl.build(**kwargs) + composed_url: str = str(AnyUrl.build(**kwargs)) # type: ignore[arg-type] return composed_url def _build_api_base_url(self, *, prefix: str) -> str: diff --git a/packages/settings-library/tests/test_utils_service.py b/packages/settings-library/tests/test_utils_service.py index a3638f9b31e..ab6a4a5db6f 100644 --- a/packages/settings-library/tests/test_utils_service.py +++ b/packages/settings-library/tests/test_utils_service.py @@ -5,7 +5,7 @@ from functools import cached_property import pytest -from pydantic import AnyHttpUrl, parse_obj_as +from pydantic import AnyHttpUrl, TypeAdapter from pydantic.types import SecretStr from settings_library.base import BaseCustomSettings from settings_library.basic_types import PortInt, VersionTag @@ -88,8 +88,8 @@ def test_service_settings_base_urls(service_settings_cls: type): settings_with_defaults = service_settings_cls() - base_url = parse_obj_as(AnyHttpUrl, settings_with_defaults.base_url) - api_base_url = parse_obj_as(AnyHttpUrl, settings_with_defaults.api_base_url) + base_url = TypeAdapter(AnyHttpUrl).validate_python(settings_with_defaults.base_url) + api_base_url = TypeAdapter(AnyHttpUrl).validate_python(settings_with_defaults.api_base_url) assert base_url.path != api_base_url.path assert (base_url.scheme, base_url.host, base_url.port) == ( From 486437d8ad52a49cb594e9c5084df9cafc486326 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 23 Sep 2024 16:12:12 +0200 Subject: [PATCH 080/280] update error msg --- packages/settings-library/tests/test_base.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/settings-library/tests/test_base.py b/packages/settings-library/tests/test_base.py index 2809d9787e8..38058d99872 100644 --- a/packages/settings-library/tests/test_base.py +++ 
b/packages/settings-library/tests/test_base.py @@ -169,8 +169,8 @@ def test_create_settings_class_without_environ_fails( assert err_info.value.errors()[0] == { "loc": ("VALUE_DEFAULT_ENV", "S_VALUE"), - "msg": "field required", - "type": "value_error.missing", + "msg": "Field required", + "type": "missing", } From 0167480e924e412ced48966a96dc3331eeedcc8b Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 23 Sep 2024 16:15:00 +0200 Subject: [PATCH 081/280] fix model_fields iteration --- packages/settings-library/tests/test_base.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/packages/settings-library/tests/test_base.py b/packages/settings-library/tests/test_base.py index 38058d99872..c130a94feb7 100644 --- a/packages/settings-library/tests/test_base.py +++ b/packages/settings-library/tests/test_base.py @@ -39,8 +39,8 @@ def _get_attrs_tree(obj: Any) -> dict[str, Any]: def _print_defaults(model_cls: type[BaseModel]): - for field in model_cls.__fields__.values(): - print(field.name, ":", end="") + for name, field in model_cls.model_fields.items(): + print(name, ":", end="") try: default = field.get_default() print(default, type(default)) @@ -49,7 +49,7 @@ def _print_defaults(model_cls: type[BaseModel]): def _dumps_model_class(model_cls: type[BaseModel]): - d = {field.name: _get_attrs_tree(field) for field in model_cls.__fields__.values()} + d = {name: _get_attrs_tree(field) for name, field in model_cls.model_fields.items()} return json.dumps(d, indent=1) @@ -102,14 +102,14 @@ def test_create_settings_class( # DEV: Path("M1.ignore.json").write_text(dumps_model_class(M)) - assert M.__fields__["VALUE_NULLABLE_DEFAULT_ENV"].default_factory + assert M.model_fields["VALUE_NULLABLE_DEFAULT_ENV"].default_factory - assert M.__fields__["VALUE_NULLABLE_DEFAULT_ENV"].get_default() is None + assert M.model_fields["VALUE_NULLABLE_DEFAULT_ENV"].get_default() is None - assert M.__fields__["VALUE_DEFAULT_ENV"].default_factory + assert M.model_fields["VALUE_DEFAULT_ENV"].default_factory with pytest.raises(DefaultFromEnvFactoryError): - M.__fields__["VALUE_DEFAULT_ENV"].get_default() + M.model_fields["VALUE_DEFAULT_ENV"].get_default() def test_create_settings_class_with_environment( @@ -137,10 +137,10 @@ def test_create_settings_class_with_environment( instance = SettingsClass() - print(instance.json(indent=2)) + print(instance.model_dump_json(indent=2)) # checks - assert instance.dict(exclude_unset=True) == { + assert instance.model_dump(exclude_unset=True) == { "VALUE": {"S_VALUE": 2}, "VALUE_NULLABLE_REQUIRED": {"S_VALUE": 3}, } From 1409f8714e23c5032f1f52359d20a0cfd40dffaf Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 23 Sep 2024 22:30:51 +0200 Subject: [PATCH 082/280] fix labels type --- .../src/service_integration/osparc_image_specs.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/packages/service-integration/src/service_integration/osparc_image_specs.py b/packages/service-integration/src/service_integration/osparc_image_specs.py index ff4f36c3f7e..1ba13799f1d 100644 --- a/packages/service-integration/src/service_integration/osparc_image_specs.py +++ b/packages/service-integration/src/service_integration/osparc_image_specs.py @@ -3,6 +3,8 @@ """ +from pydantic import TypeAdapter +from service_integration._compose_spec_model_autogenerated import ListOrDict from service_integration.compose_spec_model import ( BuildItem, ComposeSpecification, @@ -39,11 +41,13 @@ def create_image_spec( if not 
docker_compose_overwrite_cfg.services[service_name].build.context: docker_compose_overwrite_cfg.services[service_name].build.context = "./" - docker_compose_overwrite_cfg.services[service_name].build.labels = labels + docker_compose_overwrite_cfg.services[service_name].build.labels = TypeAdapter( + ListOrDict + ).validate_python(labels) - overwrite_options = docker_compose_overwrite_cfg.services[service_name].build.model_dump( - exclude_none=True - ) + overwrite_options = docker_compose_overwrite_cfg.services[ + service_name + ].build.model_dump(exclude_none=True) build_spec = BuildItem(**overwrite_options) return ComposeSpecification( From 94749e6cc5c5f59e639dea69a78f22fd49d668a8 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 23 Sep 2024 22:35:26 +0200 Subject: [PATCH 083/280] fix root field reference --- packages/service-integration/tests/test_command_compose.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/service-integration/tests/test_command_compose.py b/packages/service-integration/tests/test_command_compose.py index 4333a28771f..50f8b5b67b4 100644 --- a/packages/service-integration/tests/test_command_compose.py +++ b/packages/service-integration/tests/test_command_compose.py @@ -48,8 +48,8 @@ def test_make_docker_compose_meta( compose_labels = compose_cfg.services[metadata_cfg.service_name()].build.labels assert compose_labels - assert isinstance(compose_labels.__root__, dict) + assert isinstance(compose_labels.root, dict) assert ( - MetadataConfig.from_labels_annotations(compose_labels.__root__) == metadata_cfg + MetadataConfig.from_labels_annotations(compose_labels.root) == metadata_cfg ) From 223e2ac435cd6192b3c6102739132eab4f38c660 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 23 Sep 2024 22:51:42 +0200 Subject: [PATCH 084/280] continue upgrading --- .../src/service_integration/cli/_config.py | 6 +++--- packages/service-integration/tests/test_oci_image_spec.py | 2 +- packages/service-integration/tests/test_osparc_config.py | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/service-integration/src/service_integration/cli/_config.py b/packages/service-integration/src/service_integration/cli/_config.py index 2f41dcb6f72..4437907efa0 100644 --- a/packages/service-integration/src/service_integration/cli/_config.py +++ b/packages/service-integration/src/service_integration/cli/_config.py @@ -25,7 +25,7 @@ def _get_labels_or_raise(build_labels) -> dict[str, str]: return dict(item.strip().split("=") for item in build_labels) if isinstance(build_labels, dict): return build_labels - if labels__root__ := build_labels.__root__: + if labels__root__ := build_labels.root: assert isinstance(labels__root__, dict) # nosec return labels__root__ raise InvalidLabelsError(build_labels=build_labels) @@ -39,7 +39,7 @@ def _create_config_from_compose_spec( ): rich.print(f"Creating osparc config files from {compose_spec_path}") - compose_spec = ComposeSpecification.parse_obj( + compose_spec = ComposeSpecification.model_validate( yaml.safe_load(compose_spec_path.read_text()) ) @@ -56,7 +56,7 @@ def _save(service_name: str, filename: Path, model: BaseModel): rich.print(f"Creating {output_path} ...", end="") with output_path.open("wt") as fh: - data = json.loads(model.json(by_alias=True, exclude_none=True)) + data = json.loads(model.model_dump_json(by_alias=True, exclude_none=True)) yaml.safe_dump(data, fh, sort_keys=False) rich.print("DONE") diff --git a/packages/service-integration/tests/test_oci_image_spec.py 
b/packages/service-integration/tests/test_oci_image_spec.py index ef2bd8b47d9..641594c9966 100644 --- a/packages/service-integration/tests/test_oci_image_spec.py +++ b/packages/service-integration/tests/test_oci_image_spec.py @@ -18,7 +18,7 @@ def test_label_schema_to_oci_conversion(monkeypatch): lsa = LabelSchemaAnnotations.create_from_env() - OciImageSpecAnnotations.parse_obj(lsa.to_oci_data()) + OciImageSpecAnnotations.model_validate(lsa.to_oci_data()) def test_create_annotations_from_metadata(tests_data_dir: Path): diff --git a/packages/service-integration/tests/test_osparc_config.py b/packages/service-integration/tests/test_osparc_config.py index f9c03ed93ec..9a5a8bd7a81 100644 --- a/packages/service-integration/tests/test_osparc_config.py +++ b/packages/service-integration/tests/test_osparc_config.py @@ -92,4 +92,4 @@ def test_settings_item_in_sync_with_service_settings_label( ) # check back - SimcoreServiceSettingLabelEntry.model_validate(new_model.dict(by_alias=True)) + SimcoreServiceSettingLabelEntry.model_validate(new_model.model_dump(by_alias=True)) From a9d30ab7d947e1a408e965c29e0e4d6e814ac9f8 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 23 Sep 2024 22:58:45 +0200 Subject: [PATCH 085/280] fix test --- .../src/service_integration/osparc_image_specs.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/packages/service-integration/src/service_integration/osparc_image_specs.py b/packages/service-integration/src/service_integration/osparc_image_specs.py index 1ba13799f1d..7f6dec6ca15 100644 --- a/packages/service-integration/src/service_integration/osparc_image_specs.py +++ b/packages/service-integration/src/service_integration/osparc_image_specs.py @@ -3,8 +3,6 @@ """ -from pydantic import TypeAdapter -from service_integration._compose_spec_model_autogenerated import ListOrDict from service_integration.compose_spec_model import ( BuildItem, ComposeSpecification, @@ -41,13 +39,11 @@ def create_image_spec( if not docker_compose_overwrite_cfg.services[service_name].build.context: docker_compose_overwrite_cfg.services[service_name].build.context = "./" - docker_compose_overwrite_cfg.services[service_name].build.labels = TypeAdapter( - ListOrDict - ).validate_python(labels) + docker_compose_overwrite_cfg.services[service_name].build.labels = labels overwrite_options = docker_compose_overwrite_cfg.services[ service_name - ].build.model_dump(exclude_none=True) + ].build.model_dump(exclude_none=True, serialize_as_any=True) build_spec = BuildItem(**overwrite_options) return ComposeSpecification( From 2a497f3f5b4fc90c2abd46dc269fd8f492a9d63d Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 23 Sep 2024 23:07:37 +0200 Subject: [PATCH 086/280] remove deprecated method --- packages/service-integration/tests/test_compose_spec_model.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/service-integration/tests/test_compose_spec_model.py b/packages/service-integration/tests/test_compose_spec_model.py index 63cd0924c99..416dfbb8eef 100644 --- a/packages/service-integration/tests/test_compose_spec_model.py +++ b/packages/service-integration/tests/test_compose_spec_model.py @@ -9,7 +9,7 @@ def test_autogenerated_compose_spec_model(tests_data_dir: Path): docker_compose_path = tests_data_dir / "docker-compose-meta.yml" # tests if parses valid file - compose_spec = ComposeSpecification.parse_obj( + compose_spec = ComposeSpecification.model_validate( yaml.safe_load(docker_compose_path.read_text()) ) From 
32318b0a350671174cb1fe2c3bd9c924589cfa86 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 23 Sep 2024 23:26:23 +0200 Subject: [PATCH 087/280] fix error output --- packages/settings-library/tests/test_base.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/packages/settings-library/tests/test_base.py b/packages/settings-library/tests/test_base.py index c130a94feb7..51dad3de346 100644 --- a/packages/settings-library/tests/test_base.py +++ b/packages/settings-library/tests/test_base.py @@ -287,9 +287,11 @@ class SettingsClassExt(SettingsClass): error = err_info.value.errors()[0] assert error == { + "input": "", "loc": ("INT_VALUE_TO_NOTHING",), - "msg": "value is not a valid integer", - "type": "type_error.integer", + "msg": "Input should be a valid integer, unable to parse string as an integer", + "type": "int_parsing", + 'url': 'https://errors.pydantic.dev/2.9/v/int_parsing', } From 45baff83ea926b626ff8e0ee18b73e8e3b114e6a Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 24 Sep 2024 13:51:10 +0200 Subject: [PATCH 088/280] fix base settings --- .../src/settings_library/base.py | 167 ++++++++++-------- packages/settings-library/tests/test_base.py | 12 +- .../tests/test_base_w_postgres.py | 6 +- 3 files changed, 99 insertions(+), 86 deletions(-) diff --git a/packages/settings-library/src/settings_library/base.py b/packages/settings-library/src/settings_library/base.py index 7e3ca51dfb0..ba17b59c1cf 100644 --- a/packages/settings-library/src/settings_library/base.py +++ b/packages/settings-library/src/settings_library/base.py @@ -1,10 +1,12 @@ import logging from functools import cached_property from types import UnionType -from typing import Any, Final, get_args, get_origin +from typing import Any, Final, Literal, Sequence, get_args, get_origin -from pydantic import PydanticUserError, ValidationInfo, field_validator +from pydantic import ValidationError, ValidationInfo, field_validator from pydantic.fields import FieldInfo +from pydantic.v1.error_wrappers import ErrorList, ErrorWrapper +from pydantic_core import PydanticUndefined from pydantic_settings import BaseSettings, SettingsConfigDict _logger = logging.getLogger(__name__) @@ -14,8 +16,8 @@ ] = "%s auto_default_from_env unresolved, defaulting to None" -class DefaultFromEnvFactoryError(PydanticUserError): - ... +class DefaultFromEnvFactoryError(ValueError): + pass def allows_none(info: FieldInfo) -> bool: @@ -25,39 +27,51 @@ def allows_none(info: FieldInfo) -> bool: return False -# def create_settings_from_env(field): -# # NOTE: Cannot pass only field.type_ because @prepare_field (when this function is called) -# # this value is still not resolved (field.type_ at that moment has a weak_ref). -# # Therefore we keep the entire 'field' but MUST be treated here as read-only +def get_type(info: FieldInfo) -> Any: + field_type = info.annotation + if args := get_args(info.annotation): + field_type = next(a for a in args if a != type(None)) + return field_type -# def _default_factory(): -# """Creates default from sub-settings or None (if nullable)""" -# field_settings_cls = field.type_ -# try: -# return field_settings_cls() -# except ValidationError as err: -# if field.allow_none: -# # e.g. 
Optional[PostgresSettings] would warn if defaults to None -# _logger.warning( -# _DEFAULTS_TO_NONE_MSG, -# field.name, -# ) -# return None +def is_literal(info: FieldInfo) -> bool: + origin = get_origin(info.annotation) + return origin is Literal -# def _prepend_field_name(ee: ErrorList): -# if isinstance(ee, ErrorWrapper): -# return ErrorWrapper(ee.exc, (field.name, *ee.loc_tuple())) -# assert isinstance(ee, Sequence) # nosec -# return [_prepend_field_name(e) for e in ee] -# raise DefaultFromEnvFactoryError( -# errors=_prepend_field_name(err.raw_errors), -# model=err.model, -# # FIXME: model = shall be the parent settings?? but I dont find how retrieve it from the field -# ) from err +def create_settings_from_env(field_name, field): + # NOTE: Cannot pass only field.type_ because @prepare_field (when this function is called) + # this value is still not resolved (field.type_ at that moment has a weak_ref). + # Therefore we keep the entire 'field' but MUST be treated here as read-only -# return _default_factory + def _default_factory(): + """Creates default from sub-settings or None (if nullable)""" + field_settings_cls = get_type(field) + try: + return field_settings_cls() + + except ValidationError as err: + if allows_none(field): + # e.g. Optional[PostgresSettings] would warn if defaults to None + _logger.warning( + _DEFAULTS_TO_NONE_MSG, + field_name, + ) + return None + + def _prepend_field_name(ee: ErrorList): + if isinstance(ee, ErrorWrapper): + return ErrorWrapper(ee.exc, (field_name, *ee.loc_tuple())) + assert isinstance(ee, Sequence) # nosec + return [_prepend_field_name(e) for e in ee] + + raise DefaultFromEnvFactoryError( + #errors=_prepend_field_name(err.errors()), + # model=err.model, + # FIXME: model = shall be the parent settings?? but I dont find how retrieve it from the field + ) from err + + return _default_factory class BaseCustomSettings(BaseSettings): @@ -86,54 +100,53 @@ def parse_none(cls, v, info: ValidationInfo): case_sensitive=True, # All must be capitalized extra="forbid", frozen=True, - validate_default=True, + validate_default=False, ignored_types=(cached_property,), + defer_build=True, ) - # @classmethod - # def prepare_field(cls, field: ModelField) -> None: - # super().prepare_field(field) - - # auto_default_from_env = field.field_info.extra.get( - # "auto_default_from_env", False - # ) - - # field_type = field.type_ - # if args := get_args(field_type): - # field_type = next(a for a in args if a != type(None)) - - # # Avoids issubclass raising TypeError. 
SEE test_issubclass_type_error_with_pydantic_models - # is_not_composed = ( - # get_origin(field_type) is None - # ) # is not composed as dict[str, Any] or Generic[Base] - # # avoid literals raising TypeError - # is_not_literal = is_literal_type(field.type_) is False - - # if ( - # is_not_literal - # and is_not_composed - # and issubclass(field_type, BaseCustomSettings) - # ): - # if auto_default_from_env: - # assert field.field_info.default is Undefined - # assert field.field_info.default_factory is None - - # # Transform it into something like `Field(default_factory=create_settings_from_env(field))` - # field.default_factory = create_settings_from_env(field) - # field.default = None - # field.required = False # has a default now - - # elif ( - # is_not_literal - # and is_not_composed - # and issubclass(field_type, BaseSettings) - # ): - # msg = f"{cls}.{field.name} of type {field_type} must inherit from BaseCustomSettings" - # raise ConfigError(msg) - - # elif auto_default_from_env: - # msg = f"auto_default_from_env=True can only be used in BaseCustomSettings subclassesbut field {cls}.{field.name} is {field_type} " - # raise ConfigError(msg) + @classmethod + def __pydantic_init_subclass__(cls, **_kwargs: Any): + for name, field in cls.model_fields.items(): + auto_default_from_env = ( + field.json_schema_extra is not None + and field.json_schema_extra.get("auto_default_from_env", False) # type: ignore[union-attr] + ) + + field_type = get_type(field) + + # Avoids issubclass raising TypeError. SEE test_issubclass_type_error_with_pydantic_models + is_not_composed = ( + get_origin(field_type) is None + ) # is not composed as dict[str, Any] or Generic[Base] + is_not_literal = not get_origin(field.annotation) is Literal + + if ( + is_not_literal + and is_not_composed + and issubclass(field_type, BaseCustomSettings) + ): + if auto_default_from_env: + assert field.default is PydanticUndefined + assert field.default_factory is None + + # Transform it into something like `Field(default_factory=create_settings_from_env(field))` + field.default_factory = create_settings_from_env(name, field) + field.default = None + # field.required = False # has a default now + + elif ( + is_not_literal + and is_not_composed + and issubclass(field_type, BaseSettings) + ): + msg = f"{cls}.{name} of type {field_type} must inherit from BaseCustomSettings" + raise ValueError(msg) + + elif auto_default_from_env: + msg = f"auto_default_from_env=True can only be used in BaseCustomSettings subclassesbut field {cls}.{name} is {field_type} " + raise ValueError(msg) + @classmethod def create_from_envs(cls, **overrides): diff --git a/packages/settings-library/tests/test_base.py b/packages/settings-library/tests/test_base.py index 51dad3de346..c5668b596a4 100644 --- a/packages/settings-library/tests/test_base.py +++ b/packages/settings-library/tests/test_base.py @@ -65,7 +65,6 @@ class M1(BaseCustomSettings): VALUE_CONFUSING: S = None # type: ignore VALUE_NULLABLE_REQUIRED: S | None = ... 
# type: ignore - VALUE_NULLABLE_OPTIONAL: S | None VALUE_NULLABLE_DEFAULT_VALUE: S | None = S(S_VALUE=42) VALUE_NULLABLE_DEFAULT_NULL: S | None = None @@ -109,7 +108,7 @@ def test_create_settings_class( assert M.model_fields["VALUE_DEFAULT_ENV"].default_factory with pytest.raises(DefaultFromEnvFactoryError): - M.model_fields["VALUE_DEFAULT_ENV"].get_default() + M.model_fields["VALUE_DEFAULT_ENV"].get_default(call_default_factory=True) def test_create_settings_class_with_environment( @@ -145,12 +144,11 @@ def test_create_settings_class_with_environment( "VALUE_NULLABLE_REQUIRED": {"S_VALUE": 3}, } - assert instance.dict() == { + assert instance.model_dump() == { "VALUE": {"S_VALUE": 2}, "VALUE_DEFAULT": {"S_VALUE": 42}, "VALUE_CONFUSING": None, "VALUE_NULLABLE_REQUIRED": {"S_VALUE": 3}, - "VALUE_NULLABLE_OPTIONAL": None, "VALUE_NULLABLE_DEFAULT_VALUE": {"S_VALUE": 42}, "VALUE_NULLABLE_DEFAULT_NULL": None, "VALUE_NULLABLE_DEFAULT_ENV": {"S_VALUE": 1}, @@ -164,10 +162,10 @@ def test_create_settings_class_without_environ_fails( # now defining S_VALUE M2_outside_context = create_settings_class("M2") - with pytest.raises(ValidationError) as err_info: + with pytest.raises(DefaultFromEnvFactoryError) as err_info: M2_outside_context.create_from_envs() - assert err_info.value.errors()[0] == { + assert err_info.value.errors[0] == { "loc": ("VALUE_DEFAULT_ENV", "S_VALUE"), "msg": "Field required", "type": "missing", @@ -291,7 +289,7 @@ class SettingsClassExt(SettingsClass): "loc": ("INT_VALUE_TO_NOTHING",), "msg": "Input should be a valid integer, unable to parse string as an integer", "type": "int_parsing", - 'url': 'https://errors.pydantic.dev/2.9/v/int_parsing', + "url": "https://errors.pydantic.dev/2.9/v/int_parsing", } diff --git a/packages/settings-library/tests/test_base_w_postgres.py b/packages/settings-library/tests/test_base_w_postgres.py index df9c4fd6cf3..1ff10839c0e 100644 --- a/packages/settings-library/tests/test_base_w_postgres.py +++ b/packages/settings-library/tests/test_base_w_postgres.py @@ -148,9 +148,11 @@ def test_parse_from_individual_envs(monkeypatch, model_classes_factory): S1() assert exc_info.value.errors()[0] == { + "input": {}, "loc": ("WEBSERVER_POSTGRES",), - "msg": "field required", - "type": "value_error.missing", + "msg": "Field required", + "type": "missing", + "url": "https://errors.pydantic.dev/2.9/v/missing", } s2 = S2() From 81f80de2b369d8c0e1c1e6c6c13e5135fec1535e Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 24 Sep 2024 14:04:45 +0200 Subject: [PATCH 089/280] fix error --- .../src/settings_library/base.py | 23 +++---------------- packages/settings-library/tests/test_base.py | 8 ++++--- 2 files changed, 8 insertions(+), 23 deletions(-) diff --git a/packages/settings-library/src/settings_library/base.py b/packages/settings-library/src/settings_library/base.py index ba17b59c1cf..8f37e372471 100644 --- a/packages/settings-library/src/settings_library/base.py +++ b/packages/settings-library/src/settings_library/base.py @@ -1,11 +1,10 @@ import logging from functools import cached_property from types import UnionType -from typing import Any, Final, Literal, Sequence, get_args, get_origin +from typing import Any, Final, Literal, get_args, get_origin from pydantic import ValidationError, ValidationInfo, field_validator from pydantic.fields import FieldInfo -from pydantic.v1.error_wrappers import ErrorList, ErrorWrapper from pydantic_core import PydanticUndefined from pydantic_settings import BaseSettings, SettingsConfigDict @@ -16,10 +15,6 @@ ] = "%s 
auto_default_from_env unresolved, defaulting to None" -class DefaultFromEnvFactoryError(ValueError): - pass - - def allows_none(info: FieldInfo) -> bool: origin = get_origin(info.annotation) # X | None or Optional[X] will return Union if origin is UnionType: @@ -59,17 +54,7 @@ def _default_factory(): ) return None - def _prepend_field_name(ee: ErrorList): - if isinstance(ee, ErrorWrapper): - return ErrorWrapper(ee.exc, (field_name, *ee.loc_tuple())) - assert isinstance(ee, Sequence) # nosec - return [_prepend_field_name(e) for e in ee] - - raise DefaultFromEnvFactoryError( - #errors=_prepend_field_name(err.errors()), - # model=err.model, - # FIXME: model = shall be the parent settings?? but I dont find how retrieve it from the field - ) from err + raise err return _default_factory @@ -111,8 +96,7 @@ def __pydantic_init_subclass__(cls, **_kwargs: Any): auto_default_from_env = ( field.json_schema_extra is not None and field.json_schema_extra.get("auto_default_from_env", False) # type: ignore[union-attr] - ) - + ) field_type = get_type(field) # Avoids issubclass raising TypeError. SEE test_issubclass_type_error_with_pydantic_models @@ -147,7 +131,6 @@ def __pydantic_init_subclass__(cls, **_kwargs: Any): msg = f"auto_default_from_env=True can only be used in BaseCustomSettings subclassesbut field {cls}.{name} is {field_type} " raise ValueError(msg) - @classmethod def create_from_envs(cls, **overrides): # Kept for legacy. Identical to the constructor. diff --git a/packages/settings-library/tests/test_base.py b/packages/settings-library/tests/test_base.py index c5668b596a4..33f0baaca56 100644 --- a/packages/settings-library/tests/test_base.py +++ b/packages/settings-library/tests/test_base.py @@ -162,13 +162,15 @@ def test_create_settings_class_without_environ_fails( # now defining S_VALUE M2_outside_context = create_settings_class("M2") - with pytest.raises(DefaultFromEnvFactoryError) as err_info: + with pytest.raises(ValidationError) as err_info: M2_outside_context.create_from_envs() - assert err_info.value.errors[0] == { - "loc": ("VALUE_DEFAULT_ENV", "S_VALUE"), + assert err_info.value.errors()[0] == { + "input": {}, + "loc": ("S_VALUE",), "msg": "Field required", "type": "missing", + "url": "https://errors.pydantic.dev/2.9/v/missing", } From 1c33bfb900c261f7f0a33897e85f0d1629180369 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 24 Sep 2024 14:18:38 +0200 Subject: [PATCH 090/280] fix error class --- .../src/settings_library/base.py | 16 +++++++++++----- packages/settings-library/tests/test_base.py | 4 ++-- 2 files changed, 13 insertions(+), 7 deletions(-) diff --git a/packages/settings-library/src/settings_library/base.py b/packages/settings-library/src/settings_library/base.py index 8f37e372471..108665efffe 100644 --- a/packages/settings-library/src/settings_library/base.py +++ b/packages/settings-library/src/settings_library/base.py @@ -3,9 +3,9 @@ from types import UnionType from typing import Any, Final, Literal, get_args, get_origin -from pydantic import ValidationError, ValidationInfo, field_validator +from pydantic import ValidationInfo, field_validator from pydantic.fields import FieldInfo -from pydantic_core import PydanticUndefined +from pydantic_core import PydanticUndefined, ValidationError from pydantic_settings import BaseSettings, SettingsConfigDict _logger = logging.getLogger(__name__) @@ -15,6 +15,12 @@ ] = "%s auto_default_from_env unresolved, defaulting to None" +class DefaultFromEnvFactoryError(ValueError): + def __init__(self, errors): + super().__init__() + 
self.errors = errors + + def allows_none(info: FieldInfo) -> bool: origin = get_origin(info.annotation) # X | None or Optional[X] will return Union if origin is UnionType: @@ -54,7 +60,7 @@ def _default_factory(): ) return None - raise err + raise DefaultFromEnvFactoryError(errors=err.errors()) from err return _default_factory @@ -95,8 +101,8 @@ def __pydantic_init_subclass__(cls, **_kwargs: Any): for name, field in cls.model_fields.items(): auto_default_from_env = ( field.json_schema_extra is not None - and field.json_schema_extra.get("auto_default_from_env", False) # type: ignore[union-attr] - ) + and field.json_schema_extra.get("auto_default_from_env", False) + ) # type: ignore[union-attr] field_type = get_type(field) # Avoids issubclass raising TypeError. SEE test_issubclass_type_error_with_pydantic_models diff --git a/packages/settings-library/tests/test_base.py b/packages/settings-library/tests/test_base.py index 33f0baaca56..cf9c25f28ae 100644 --- a/packages/settings-library/tests/test_base.py +++ b/packages/settings-library/tests/test_base.py @@ -162,10 +162,10 @@ def test_create_settings_class_without_environ_fails( # now defining S_VALUE M2_outside_context = create_settings_class("M2") - with pytest.raises(ValidationError) as err_info: + with pytest.raises(DefaultFromEnvFactoryError) as err_info: M2_outside_context.create_from_envs() - assert err_info.value.errors()[0] == { + assert err_info.value.errors[0] == { "input": {}, "loc": ("S_VALUE",), "msg": "Field required", From 898155da53bad2af960a0d17fd00393cfe9f98ec Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 24 Sep 2024 14:26:56 +0200 Subject: [PATCH 091/280] fix nullable --- packages/settings-library/tests/test_base_w_postgres.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/settings-library/tests/test_base_w_postgres.py b/packages/settings-library/tests/test_base_w_postgres.py index 1ff10839c0e..a71ae1a10f6 100644 --- a/packages/settings-library/tests/test_base_w_postgres.py +++ b/packages/settings-library/tests/test_base_w_postgres.py @@ -62,7 +62,7 @@ class S1(BaseCustomSettings): WEBSERVER_POSTGRES: _FakePostgresSettings class S2(BaseCustomSettings): - WEBSERVER_POSTGRES_NULLABLE_OPTIONAL: _FakePostgresSettings | None + WEBSERVER_POSTGRES_NULLABLE_OPTIONAL: _FakePostgresSettings | None = None class S3(BaseCustomSettings): # cannot be disabled!! 
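[Illustrative aside, not part of any patch in this series] The commits before and after this point keep applying the same mechanical Pydantic v1 to v2 migration. The short Python sketch below summarizes that API mapping as it is used in these diffs; the model name ExampleSettings and its single field are made up for illustration, while the replacement calls (model_validate, model_dump, model_dump_json, model_fields, FieldInfo.get_default, TypeAdapter) are the ones that appear in the patches themselves.

# Hedged sketch of the v1 -> v2 replacements used throughout this series.
# ExampleSettings is hypothetical; only the API mapping is taken from the diffs.
from pydantic import AnyHttpUrl, BaseModel, TypeAdapter

class ExampleSettings(BaseModel):
    S_VALUE: int = 42

settings = ExampleSettings()

# v1: ExampleSettings.parse_obj(data)    -> v2: ExampleSettings.model_validate(data)
validated = ExampleSettings.model_validate({"S_VALUE": 3})

# v1: settings.dict() / settings.json()  -> v2: settings.model_dump() / settings.model_dump_json()
as_dict = settings.model_dump(exclude_unset=True)
as_json = settings.model_dump_json(indent=2)

# v1: ExampleSettings.__fields__ (ModelField) -> v2: ExampleSettings.model_fields (name -> FieldInfo)
for name, field in ExampleSettings.model_fields.items():
    print(name, field.get_default(call_default_factory=True))

# v1: parse_obj_as(AnyHttpUrl, value)    -> v2: TypeAdapter(AnyHttpUrl).validate_python(value)
url = TypeAdapter(AnyHttpUrl).validate_python("http://jaeger:9411")

For fields that need a computed default (the auto_default_from_env settings), the later patches instead attach a default_factory to the FieldInfo inside __pydantic_init_subclass__ and then call cls.model_rebuild(force=True) (added in patch 096), presumably because FieldInfo edits made after class creation only take effect once the model schema is rebuilt.
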
From e3fc9c5a017c4c127dc87ec74cabceb1220a2657 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 24 Sep 2024 16:00:52 +0200 Subject: [PATCH 092/280] update base --- packages/settings-library/src/settings_library/base.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/settings-library/src/settings_library/base.py b/packages/settings-library/src/settings_library/base.py index 108665efffe..d66711dfc74 100644 --- a/packages/settings-library/src/settings_library/base.py +++ b/packages/settings-library/src/settings_library/base.py @@ -91,9 +91,9 @@ def parse_none(cls, v, info: ValidationInfo): case_sensitive=True, # All must be capitalized extra="forbid", frozen=True, - validate_default=False, - ignored_types=(cached_property,), + validate_default=True, defer_build=True, + ignored_types=(cached_property,), ) @classmethod @@ -101,8 +101,8 @@ def __pydantic_init_subclass__(cls, **_kwargs: Any): for name, field in cls.model_fields.items(): auto_default_from_env = ( field.json_schema_extra is not None - and field.json_schema_extra.get("auto_default_from_env", False) - ) # type: ignore[union-attr] + and field.json_schema_extra.get("auto_default_from_env", False) # type: ignore[union-attr] + ) field_type = get_type(field) # Avoids issubclass raising TypeError. SEE test_issubclass_type_error_with_pydantic_models From e645331939ca9bf60af404c89b14c7b7e13f96fe Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 24 Sep 2024 16:01:24 +0200 Subject: [PATCH 093/280] update test --- packages/settings-library/tests/test_base.py | 20 ++++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/packages/settings-library/tests/test_base.py b/packages/settings-library/tests/test_base.py index cf9c25f28ae..e111b1bfe60 100644 --- a/packages/settings-library/tests/test_base.py +++ b/packages/settings-library/tests/test_base.py @@ -62,15 +62,15 @@ class S(BaseCustomSettings): class M1(BaseCustomSettings): VALUE: S VALUE_DEFAULT: S = S(S_VALUE=42) - VALUE_CONFUSING: S = None # type: ignore + #VALUE_CONFUSING: S = None # type: ignore VALUE_NULLABLE_REQUIRED: S | None = ... 
# type: ignore VALUE_NULLABLE_DEFAULT_VALUE: S | None = S(S_VALUE=42) VALUE_NULLABLE_DEFAULT_NULL: S | None = None - VALUE_NULLABLE_DEFAULT_ENV: S | None = Field(auto_default_from_env=True) - VALUE_DEFAULT_ENV: S = Field(auto_default_from_env=True) + VALUE_NULLABLE_DEFAULT_ENV: S | None = Field(json_schema_extra={"auto_default_from_env": True}) + VALUE_DEFAULT_ENV: S = Field(json_schema_extra={"auto_default_from_env": True}) class M2(BaseCustomSettings): # @@ -82,10 +82,10 @@ class M2(BaseCustomSettings): VALUE_NULLABLE_DEFAULT_NULL: S | None = None # defaults enabled but if not exists, it disables - VALUE_NULLABLE_DEFAULT_ENV: S | None = Field(auto_default_from_env=True) + VALUE_NULLABLE_DEFAULT_ENV: S | None = Field(json_schema_extra={"auto_default_from_env": True}) # cannot be disabled - VALUE_DEFAULT_ENV: S = Field(auto_default_from_env=True) + VALUE_DEFAULT_ENV: S = Field(json_schema_extra={"auto_default_from_env": True}) # Changed in version 3.7: Dictionary order is guaranteed to be insertion order _classes = {"M1": M1, "M2": M2, "S": S} @@ -147,7 +147,7 @@ def test_create_settings_class_with_environment( assert instance.model_dump() == { "VALUE": {"S_VALUE": 2}, "VALUE_DEFAULT": {"S_VALUE": 42}, - "VALUE_CONFUSING": None, + #"VALUE_CONFUSING": None, "VALUE_NULLABLE_REQUIRED": {"S_VALUE": 3}, "VALUE_NULLABLE_DEFAULT_VALUE": {"S_VALUE": 42}, "VALUE_NULLABLE_DEFAULT_NULL": None, @@ -203,7 +203,9 @@ def test_auto_default_to_none_logs_a_warning( class SettingsClass(BaseCustomSettings): VALUE_NULLABLE_DEFAULT_NULL: S | None = None - VALUE_NULLABLE_DEFAULT_ENV: S | None = Field(auto_default_from_env=True) + VALUE_NULLABLE_DEFAULT_ENV: S | None = Field( + json_schema_extra={"auto_default_from_env": True}, + ) instance = SettingsClass.create_from_envs() assert instance.VALUE_NULLABLE_DEFAULT_NULL is None @@ -225,7 +227,9 @@ def test_auto_default_to_not_none( class SettingsClass(BaseCustomSettings): VALUE_NULLABLE_DEFAULT_NULL: S | None = None - VALUE_NULLABLE_DEFAULT_ENV: S | None = Field(auto_default_from_env=True) + VALUE_NULLABLE_DEFAULT_ENV: S | None = Field( + json_schema_extra={"auto_default_from_env": True}, + ) instance = SettingsClass.create_from_envs() assert instance.VALUE_NULLABLE_DEFAULT_NULL is None From ff5533bf2f7ff235e344c8d4b04437d4fb063e73 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 24 Sep 2024 16:04:07 +0200 Subject: [PATCH 094/280] restore Error type --- packages/settings-library/tests/test_base_w_postgres.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/settings-library/tests/test_base_w_postgres.py b/packages/settings-library/tests/test_base_w_postgres.py index a71ae1a10f6..24dd6f0b5d2 100644 --- a/packages/settings-library/tests/test_base_w_postgres.py +++ b/packages/settings-library/tests/test_base_w_postgres.py @@ -67,13 +67,13 @@ class S2(BaseCustomSettings): class S3(BaseCustomSettings): # cannot be disabled!! 
WEBSERVER_POSTGRES_DEFAULT_ENV: _FakePostgresSettings = Field( - auto_default_from_env=True + json_schema_extra={"auto_default_from_env": True} ) class S4(BaseCustomSettings): # defaults enabled but if cannot be resolved, it disables - WEBSERVER_POSTGRES_NULLABLE_DEFAULT_ENV: _FakePostgresSettings | None = ( - Field(auto_default_from_env=True) + WEBSERVER_POSTGRES_NULLABLE_DEFAULT_ENV: _FakePostgresSettings | None = Field( + json_schema_extra={"auto_default_from_env": True} ) class S5(BaseCustomSettings): @@ -117,7 +117,7 @@ def test_parse_from_empty_envs(model_classes_factory: Callable): assert s2.WEBSERVER_POSTGRES_NULLABLE_OPTIONAL is None with pytest.raises(DefaultFromEnvFactoryError): - # NOTE: cannot hae a default or assignment + # NOTE: cannot have a default or assignment S3() # auto default factory resolves to None (because is nullable) From 0eaa8c8e89333e7ce145bbcdcf121f5269a8edf5 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 24 Sep 2024 17:12:16 +0200 Subject: [PATCH 095/280] update test --- .../src/settings_library/base.py | 9 ++-- .../tests/test_base_w_postgres.py | 46 +++++++++++++++---- 2 files changed, 40 insertions(+), 15 deletions(-) diff --git a/packages/settings-library/src/settings_library/base.py b/packages/settings-library/src/settings_library/base.py index d66711dfc74..b6171d2c4c9 100644 --- a/packages/settings-library/src/settings_library/base.py +++ b/packages/settings-library/src/settings_library/base.py @@ -76,7 +76,7 @@ class BaseCustomSettings(BaseSettings): @field_validator("*", mode="before") @classmethod - def parse_none(cls, v, info: ValidationInfo): + def _parse_none(cls, v, info: ValidationInfo): # WARNING: In nullable fields, envs equal to null or none are parsed as None !! if ( info.field_name @@ -101,8 +101,8 @@ def __pydantic_init_subclass__(cls, **_kwargs: Any): for name, field in cls.model_fields.items(): auto_default_from_env = ( field.json_schema_extra is not None - and field.json_schema_extra.get("auto_default_from_env", False) # type: ignore[union-attr] - ) + and field.json_schema_extra.get("auto_default_from_env", False) + ) # type: ignore[union-attr] field_type = get_type(field) # Avoids issubclass raising TypeError. 
SEE test_issubclass_type_error_with_pydantic_models @@ -123,7 +123,6 @@ def __pydantic_init_subclass__(cls, **_kwargs: Any): # Transform it into something like `Field(default_factory=create_settings_from_env(field))` field.default_factory = create_settings_from_env(name, field) field.default = None - # field.required = False # has a default now elif ( is_not_literal @@ -134,7 +133,7 @@ def __pydantic_init_subclass__(cls, **_kwargs: Any): raise ValueError(msg) elif auto_default_from_env: - msg = f"auto_default_from_env=True can only be used in BaseCustomSettings subclassesbut field {cls}.{name} is {field_type} " + msg = f"auto_default_from_env=True can only be used in BaseCustomSettings subclasses but field {cls}.{name} is {field_type} " raise ValueError(msg) @classmethod diff --git a/packages/settings-library/tests/test_base_w_postgres.py b/packages/settings-library/tests/test_base_w_postgres.py index 24dd6f0b5d2..4e817dae740 100644 --- a/packages/settings-library/tests/test_base_w_postgres.py +++ b/packages/settings-library/tests/test_base_w_postgres.py @@ -3,6 +3,7 @@ # pylint: disable=unused-variable +import os from collections.abc import Callable import pytest @@ -22,6 +23,13 @@ # +@pytest.fixture +def postgres_envvars_unset(monkeypatch: pytest.MonkeyPatch) -> None: + for name in os.environ: + if name.startswith("POSTGRES_"): + monkeypatch.delenv(name) + + @pytest.fixture def model_classes_factory() -> Callable: # @@ -72,8 +80,8 @@ class S3(BaseCustomSettings): class S4(BaseCustomSettings): # defaults enabled but if cannot be resolved, it disables - WEBSERVER_POSTGRES_NULLABLE_DEFAULT_ENV: _FakePostgresSettings | None = Field( - json_schema_extra={"auto_default_from_env": True} + WEBSERVER_POSTGRES_NULLABLE_DEFAULT_ENV: _FakePostgresSettings | None = ( + Field(json_schema_extra={"auto_default_from_env": True}) ) class S5(BaseCustomSettings): @@ -106,7 +114,9 @@ class S5(BaseCustomSettings): # -def test_parse_from_empty_envs(model_classes_factory: Callable): +def test_parse_from_empty_envs( + postgres_envvars_unset: None, model_classes_factory: Callable +): S1, S2, S3, S4, S5 = model_classes_factory() @@ -128,7 +138,11 @@ def test_parse_from_empty_envs(model_classes_factory: Callable): assert s5.WEBSERVER_POSTGRES_NULLABLE_DEFAULT_NULL is None -def test_parse_from_individual_envs(monkeypatch, model_classes_factory): +def test_parse_from_individual_envs( + postgres_envvars_unset: None, + monkeypatch: pytest.MonkeyPatch, + model_classes_factory: Callable, +): S1, S2, S3, S4, S5 = model_classes_factory() @@ -194,7 +208,9 @@ def test_parse_from_individual_envs(monkeypatch, model_classes_factory): assert s5.dict() == {"WEBSERVER_POSTGRES_NULLABLE_DEFAULT_NULL": None} -def test_parse_compact_env(monkeypatch, model_classes_factory): +def test_parse_compact_env( + postgres_envvars_unset: None, monkeypatch, model_classes_factory +): S1, S2, S3, S4, S5 = model_classes_factory() @@ -306,7 +322,9 @@ def test_parse_compact_env(monkeypatch, model_classes_factory): } -def test_parse_from_mixed_envs(monkeypatch, model_classes_factory): +def test_parse_from_mixed_envs( + postgres_envvars_unset: None, monkeypatch, model_classes_factory +): S1, S2, S3, S4, S5 = model_classes_factory() @@ -440,7 +458,9 @@ def test_parse_from_mixed_envs(monkeypatch, model_classes_factory): # -def test_toggle_plugin_1(monkeypatch, model_classes_factory): +def test_toggle_plugin_1( + postgres_envvars_unset: None, monkeypatch, model_classes_factory +): *_, S4, S5 = model_classes_factory() @@ -453,7 +473,9 @@ def 
test_toggle_plugin_1(monkeypatch, model_classes_factory): assert s5.WEBSERVER_POSTGRES_NULLABLE_DEFAULT_NULL is None -def test_toggle_plugin_2(monkeypatch, model_classes_factory): +def test_toggle_plugin_2( + postgres_envvars_unset: None, monkeypatch, model_classes_factory +): *_, S4, S5 = model_classes_factory() # minimal @@ -474,7 +496,9 @@ def test_toggle_plugin_2(monkeypatch, model_classes_factory): assert s5.WEBSERVER_POSTGRES_NULLABLE_DEFAULT_NULL is None -def test_toggle_plugin_3(monkeypatch, model_classes_factory): +def test_toggle_plugin_3( + postgres_envvars_unset: None, monkeypatch, model_classes_factory +): *_, S4, S5 = model_classes_factory() # explicitly disables @@ -497,7 +521,9 @@ def test_toggle_plugin_3(monkeypatch, model_classes_factory): assert s5.WEBSERVER_POSTGRES_NULLABLE_DEFAULT_NULL is None -def test_toggle_plugin_4(monkeypatch, model_classes_factory): +def test_toggle_plugin_4( + postgres_envvars_unset: None, monkeypatch, model_classes_factory +): *_, S4, S5 = model_classes_factory() JSON_VALUE = '{"POSTGRES_HOST":"pg2", "POSTGRES_USER":"test2", "POSTGRES_PASSWORD":"shh2", "POSTGRES_DB":"db2"}' From 9c22d2b54001ce301a260a343bf7855839049373 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 25 Sep 2024 12:53:33 +0200 Subject: [PATCH 096/280] force mode_rebuild --- .../src/settings_library/base.py | 27 ++++++++++--------- .../tests/test__pydantic_settings.py | 6 ++--- packages/settings-library/tests/test_base.py | 23 +++++++++++----- 3 files changed, 34 insertions(+), 22 deletions(-) diff --git a/packages/settings-library/src/settings_library/base.py b/packages/settings-library/src/settings_library/base.py index b6171d2c4c9..d4f9d28e3d3 100644 --- a/packages/settings-library/src/settings_library/base.py +++ b/packages/settings-library/src/settings_library/base.py @@ -21,38 +21,38 @@ def __init__(self, errors): self.errors = errors -def allows_none(info: FieldInfo) -> bool: +def _allows_none(info: FieldInfo) -> bool: origin = get_origin(info.annotation) # X | None or Optional[X] will return Union if origin is UnionType: return any(x in get_args(info.annotation) for x in (type(None), Any)) return False -def get_type(info: FieldInfo) -> Any: +def _get_type(info: FieldInfo) -> Any: field_type = info.annotation if args := get_args(info.annotation): field_type = next(a for a in args if a != type(None)) return field_type -def is_literal(info: FieldInfo) -> bool: +def _is_literal(info: FieldInfo) -> bool: origin = get_origin(info.annotation) return origin is Literal -def create_settings_from_env(field_name, field): +def _create_settings_from_env(field_name: str, info: FieldInfo): # NOTE: Cannot pass only field.type_ because @prepare_field (when this function is called) # this value is still not resolved (field.type_ at that moment has a weak_ref). # Therefore we keep the entire 'field' but MUST be treated here as read-only def _default_factory(): """Creates default from sub-settings or None (if nullable)""" - field_settings_cls = get_type(field) + field_settings_cls = _get_type(info) try: return field_settings_cls() except ValidationError as err: - if allows_none(field): + if _allows_none(info): # e.g. Optional[PostgresSettings] would warn if defaults to None _logger.warning( _DEFAULTS_TO_NONE_MSG, @@ -80,7 +80,7 @@ def _parse_none(cls, v, info: ValidationInfo): # WARNING: In nullable fields, envs equal to null or none are parsed as None !! 
if ( info.field_name - and allows_none(cls.model_fields[info.field_name]) + and _allows_none(cls.model_fields[info.field_name]) and isinstance(v, str) and v.lower() in ("null", "none") ): @@ -92,24 +92,25 @@ def _parse_none(cls, v, info: ValidationInfo): extra="forbid", frozen=True, validate_default=True, - defer_build=True, ignored_types=(cached_property,), ) @classmethod - def __pydantic_init_subclass__(cls, **_kwargs: Any): + def __pydantic_init_subclass__(cls, **kwargs: Any): + super().__pydantic_init_subclass__(**kwargs) + for name, field in cls.model_fields.items(): auto_default_from_env = ( field.json_schema_extra is not None and field.json_schema_extra.get("auto_default_from_env", False) ) # type: ignore[union-attr] - field_type = get_type(field) + field_type = _get_type(field) # Avoids issubclass raising TypeError. SEE test_issubclass_type_error_with_pydantic_models is_not_composed = ( get_origin(field_type) is None ) # is not composed as dict[str, Any] or Generic[Base] - is_not_literal = not get_origin(field.annotation) is Literal + is_not_literal = not _is_literal(field) if ( is_not_literal @@ -121,7 +122,7 @@ def __pydantic_init_subclass__(cls, **_kwargs: Any): assert field.default_factory is None # Transform it into something like `Field(default_factory=create_settings_from_env(field))` - field.default_factory = create_settings_from_env(name, field) + field.default_factory = _create_settings_from_env(name, field) field.default = None elif ( @@ -136,6 +137,8 @@ def __pydantic_init_subclass__(cls, **_kwargs: Any): msg = f"auto_default_from_env=True can only be used in BaseCustomSettings subclasses but field {cls}.{name} is {field_type} " raise ValueError(msg) + cls.model_rebuild(force=True) + @classmethod def create_from_envs(cls, **overrides): # Kept for legacy. Identical to the constructor. diff --git a/packages/settings-library/tests/test__pydantic_settings.py b/packages/settings-library/tests/test__pydantic_settings.py index db098496af6..c97ff0f7289 100644 --- a/packages/settings-library/tests/test__pydantic_settings.py +++ b/packages/settings-library/tests/test__pydantic_settings.py @@ -15,7 +15,7 @@ from pydantic import ValidationInfo, field_validator from pydantic.fields import PydanticUndefined from pydantic_settings import BaseSettings -from settings_library.base import allows_none +from settings_library.base import _allows_none def assert_field_specs( @@ -29,7 +29,7 @@ def assert_field_specs( print(info) assert info.is_required() == is_required - assert allows_none(info) == is_nullable + assert _allows_none(info) == is_nullable if info.is_required(): # in this case, default is not really used @@ -55,7 +55,7 @@ class Settings(BaseSettings): @classmethod def parse_none(cls, v, info: ValidationInfo): # WARNING: In nullable fields, envs equal to null or none are parsed as None !! 
- if info.field_name and allows_none(cls.model_fields[info.field_name]): + if info.field_name and _allows_none(cls.model_fields[info.field_name]): if isinstance(v, str) and v.lower() in ("null", "none"): return None return v diff --git a/packages/settings-library/tests/test_base.py b/packages/settings-library/tests/test_base.py index e111b1bfe60..b11a01f5542 100644 --- a/packages/settings-library/tests/test_base.py +++ b/packages/settings-library/tests/test_base.py @@ -42,7 +42,7 @@ def _print_defaults(model_cls: type[BaseModel]): for name, field in model_cls.model_fields.items(): print(name, ":", end="") try: - default = field.get_default() + default = field.get_default(call_default_factory=True) # new in Pydatic v2 print(default, type(default)) except ValidationError as err: print(err) @@ -62,15 +62,19 @@ class S(BaseCustomSettings): class M1(BaseCustomSettings): VALUE: S VALUE_DEFAULT: S = S(S_VALUE=42) - #VALUE_CONFUSING: S = None # type: ignore + # VALUE_CONFUSING: S = None # type: ignore VALUE_NULLABLE_REQUIRED: S | None = ... # type: ignore VALUE_NULLABLE_DEFAULT_VALUE: S | None = S(S_VALUE=42) VALUE_NULLABLE_DEFAULT_NULL: S | None = None - VALUE_NULLABLE_DEFAULT_ENV: S | None = Field(json_schema_extra={"auto_default_from_env": True}) - VALUE_DEFAULT_ENV: S = Field(json_schema_extra={"auto_default_from_env": True}) + VALUE_NULLABLE_DEFAULT_ENV: S | None = Field( + json_schema_extra={"auto_default_from_env": True} + ) + VALUE_DEFAULT_ENV: S = Field( + json_schema_extra={"auto_default_from_env": True} + ) class M2(BaseCustomSettings): # @@ -82,10 +86,14 @@ class M2(BaseCustomSettings): VALUE_NULLABLE_DEFAULT_NULL: S | None = None # defaults enabled but if not exists, it disables - VALUE_NULLABLE_DEFAULT_ENV: S | None = Field(json_schema_extra={"auto_default_from_env": True}) + VALUE_NULLABLE_DEFAULT_ENV: S | None = Field( + json_schema_extra={"auto_default_from_env": True} + ) # cannot be disabled - VALUE_DEFAULT_ENV: S = Field(json_schema_extra={"auto_default_from_env": True}) + VALUE_DEFAULT_ENV: S = Field( + json_schema_extra={"auto_default_from_env": True} + ) # Changed in version 3.7: Dictionary order is guaranteed to be insertion order _classes = {"M1": M1, "M2": M2, "S": S} @@ -111,6 +119,7 @@ def test_create_settings_class( M.model_fields["VALUE_DEFAULT_ENV"].get_default(call_default_factory=True) +@pytest.mark.testit def test_create_settings_class_with_environment( monkeypatch: pytest.MonkeyPatch, create_settings_class: Callable[[str], type[BaseCustomSettings]], @@ -147,7 +156,7 @@ def test_create_settings_class_with_environment( assert instance.model_dump() == { "VALUE": {"S_VALUE": 2}, "VALUE_DEFAULT": {"S_VALUE": 42}, - #"VALUE_CONFUSING": None, + # "VALUE_CONFUSING": None, "VALUE_NULLABLE_REQUIRED": {"S_VALUE": 3}, "VALUE_NULLABLE_DEFAULT_VALUE": {"S_VALUE": 42}, "VALUE_NULLABLE_DEFAULT_NULL": None, From 4e74ec75b76e9b30aa152bd72852fe6a83a21bce Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 25 Sep 2024 15:23:27 +0200 Subject: [PATCH 097/280] fix port type --- .../src/settings_library/utils_service.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/packages/settings-library/src/settings_library/utils_service.py b/packages/settings-library/src/settings_library/utils_service.py index 99571eef230..6d269785475 100644 --- a/packages/settings-library/src/settings_library/utils_service.py +++ b/packages/settings-library/src/settings_library/utils_service.py @@ -4,14 +4,13 @@ """ from enum import Enum, auto -from pydantic import 
parse_obj_as from pydantic.networks import AnyUrl from pydantic.types import SecretStr from .basic_types import PortInt -DEFAULT_AIOHTTP_PORT: PortInt = parse_obj_as(PortInt, 8080) -DEFAULT_FASTAPI_PORT: PortInt = parse_obj_as(PortInt, 8000) +DEFAULT_AIOHTTP_PORT: PortInt = 8080 +DEFAULT_FASTAPI_PORT: PortInt = 8000 class URLPart(Enum): @@ -96,6 +95,8 @@ def _compose_url( assert prefix # nosec prefix = prefix.upper() + port_value = self._safe_getattr(f"{prefix}_PORT", port) + parts = { "scheme": ( "https" @@ -105,7 +106,7 @@ def _compose_url( "host": self._safe_getattr(f"{prefix}_HOST", URLPart.REQUIRED), "username": self._safe_getattr(f"{prefix}_USER", user), "password": self._safe_getattr(f"{prefix}_PASSWORD", password), - "port": self._safe_getattr(f"{prefix}_PORT", port), + "port": int(port_value) if port_value is not None else None, } if vtag != URLPart.EXCLUDE: # noqa: SIM102 @@ -125,7 +126,7 @@ def _compose_url( assert all(isinstance(v, str) or v is None for v in kwargs.values()) # nosec - composed_url: str = str(AnyUrl.build(**kwargs)) # type: ignore[arg-type] + composed_url: str = str(AnyUrl.build(**kwargs)) # type: ignore[arg-type] return composed_url def _build_api_base_url(self, *, prefix: str) -> str: From 0e3f7d341569e2ca303e3e97d4e2e841960ef308 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 25 Sep 2024 15:44:02 +0200 Subject: [PATCH 098/280] continue upgrading --- packages/settings-library/src/settings_library/catalog.py | 3 +-- .../settings-library/src/settings_library/director_v2.py | 3 +-- .../settings-library/src/settings_library/node_ports.py | 8 ++++++-- .../src/settings_library/resource_usage_tracker.py | 3 +-- packages/settings-library/src/settings_library/storage.py | 3 +-- packages/settings-library/src/settings_library/tracing.py | 6 +++--- .../settings-library/src/settings_library/webserver.py | 4 +--- packages/settings-library/tests/conftest.py | 8 ++++++-- 8 files changed, 20 insertions(+), 18 deletions(-) diff --git a/packages/settings-library/src/settings_library/catalog.py b/packages/settings-library/src/settings_library/catalog.py index e5f44f29269..17c71237e81 100644 --- a/packages/settings-library/src/settings_library/catalog.py +++ b/packages/settings-library/src/settings_library/catalog.py @@ -1,6 +1,5 @@ from functools import cached_property -from pydantic import parse_obj_as from settings_library.base import BaseCustomSettings from settings_library.basic_types import PortInt, VersionTag from settings_library.utils_service import ( @@ -13,7 +12,7 @@ class CatalogSettings(BaseCustomSettings, MixinServiceSettings): CATALOG_HOST: str = "catalog" CATALOG_PORT: PortInt = DEFAULT_FASTAPI_PORT - CATALOG_VTAG: VersionTag = parse_obj_as(VersionTag, "v0") + CATALOG_VTAG: VersionTag = "v0" @cached_property def api_base_url(self) -> str: diff --git a/packages/settings-library/src/settings_library/director_v2.py b/packages/settings-library/src/settings_library/director_v2.py index 78c5edd78c6..baf32956c8e 100644 --- a/packages/settings-library/src/settings_library/director_v2.py +++ b/packages/settings-library/src/settings_library/director_v2.py @@ -1,6 +1,5 @@ from functools import cached_property -from pydantic import parse_obj_as from settings_library.base import BaseCustomSettings from settings_library.basic_types import PortInt, VersionTag from settings_library.utils_service import ( @@ -13,7 +12,7 @@ class DirectorV2Settings(BaseCustomSettings, MixinServiceSettings): DIRECTOR_V2_HOST: str = "director-v2" DIRECTOR_V2_PORT: PortInt = 
DEFAULT_FASTAPI_PORT - DIRECTOR_V2_VTAG: VersionTag = parse_obj_as(VersionTag, "v2") + DIRECTOR_V2_VTAG: VersionTag = "v2" @cached_property def api_base_url(self) -> str: diff --git a/packages/settings-library/src/settings_library/node_ports.py b/packages/settings-library/src/settings_library/node_ports.py index 70f90968bf5..e37ff74c383 100644 --- a/packages/settings-library/src/settings_library/node_ports.py +++ b/packages/settings-library/src/settings_library/node_ports.py @@ -33,9 +33,13 @@ def _validate_auth_fields(cls, values): class NodePortsSettings(BaseCustomSettings): - NODE_PORTS_STORAGE_AUTH: StorageAuthSettings = Field(auto_default_from_env=True) # type: ignore[call-arg] + NODE_PORTS_STORAGE_AUTH: StorageAuthSettings = Field( + json_schema_extra={"auto_default_from_env": True} + ) - POSTGRES_SETTINGS: PostgresSettings = Field(auto_default_from_env=True) # type: ignore[call-arg] + POSTGRES_SETTINGS: PostgresSettings = Field( + json_schema_extra={"auto_default_from_env": True} + ) NODE_PORTS_MULTIPART_UPLOAD_COMPLETION_TIMEOUT_S: NonNegativeInt = int( timedelta(minutes=5).total_seconds() diff --git a/packages/settings-library/src/settings_library/resource_usage_tracker.py b/packages/settings-library/src/settings_library/resource_usage_tracker.py index dc696fab76c..d0df8f093ad 100644 --- a/packages/settings-library/src/settings_library/resource_usage_tracker.py +++ b/packages/settings-library/src/settings_library/resource_usage_tracker.py @@ -1,7 +1,6 @@ from datetime import timedelta from functools import cached_property -from pydantic import parse_obj_as from settings_library.base import BaseCustomSettings from settings_library.basic_types import PortInt, VersionTag from settings_library.utils_service import ( @@ -16,7 +15,7 @@ class ResourceUsageTrackerSettings(BaseCustomSettings, MixinServiceSettings): RESOURCE_USAGE_TRACKER_HOST: str = "resource-usage-tracker" RESOURCE_USAGE_TRACKER_PORT: PortInt = DEFAULT_FASTAPI_PORT - RESOURCE_USAGE_TRACKER_VTAG: VersionTag = parse_obj_as(VersionTag, "v1") + RESOURCE_USAGE_TRACKER_VTAG: VersionTag = "v1" @cached_property def api_base_url(self) -> str: diff --git a/packages/settings-library/src/settings_library/storage.py b/packages/settings-library/src/settings_library/storage.py index 92ec0301257..00ef1987037 100644 --- a/packages/settings-library/src/settings_library/storage.py +++ b/packages/settings-library/src/settings_library/storage.py @@ -1,6 +1,5 @@ from functools import cached_property -from pydantic import parse_obj_as from settings_library.base import BaseCustomSettings from settings_library.basic_types import PortInt, VersionTag from settings_library.utils_service import ( @@ -13,7 +12,7 @@ class StorageSettings(BaseCustomSettings, MixinServiceSettings): STORAGE_HOST: str = "storage" STORAGE_PORT: PortInt = DEFAULT_AIOHTTP_PORT - STORAGE_VTAG: VersionTag = parse_obj_as(VersionTag, "v0") + STORAGE_VTAG: VersionTag = "v0" @cached_property def base_url(self) -> str: diff --git a/packages/settings-library/src/settings_library/tracing.py b/packages/settings-library/src/settings_library/tracing.py index 36013bd93ef..e363bf2e19f 100644 --- a/packages/settings-library/src/settings_library/tracing.py +++ b/packages/settings-library/src/settings_library/tracing.py @@ -1,4 +1,4 @@ -from pydantic import AliasChoices, AnyUrl, Field, parse_obj_as +from pydantic import AliasChoices, AnyUrl, Field, TypeAdapter from .base import BaseCustomSettings @@ -7,11 +7,11 @@ class TracingSettings(BaseCustomSettings): TRACING_ZIPKIN_ENDPOINT: AnyUrl = 
Field( - default=parse_obj_as(AnyUrl, "http://jaeger:9411"), + default=TypeAdapter(AnyUrl).validate_python("http://jaeger:9411"), description="Zipkin compatible endpoint", ) TRACING_THRIFT_COMPACT_ENDPOINT: AnyUrl = Field( - default=parse_obj_as(AnyUrl, "http://jaeger:5775"), + default=TypeAdapter(AnyUrl).validate_python("http://jaeger:5775"), description="accept zipkin.thrift over compact thrift protocol (deprecated, used by legacy clients only)", ) TRACING_CLIENT_NAME: str = Field( diff --git a/packages/settings-library/src/settings_library/webserver.py b/packages/settings-library/src/settings_library/webserver.py index 4da2c41d699..c32bdbeb0c5 100644 --- a/packages/settings-library/src/settings_library/webserver.py +++ b/packages/settings-library/src/settings_library/webserver.py @@ -1,7 +1,5 @@ from functools import cached_property -from pydantic import parse_obj_as - from .base import BaseCustomSettings from .basic_types import PortInt, VersionTag from .utils_service import DEFAULT_AIOHTTP_PORT, MixinServiceSettings, URLPart @@ -10,7 +8,7 @@ class WebServerSettings(BaseCustomSettings, MixinServiceSettings): WEBSERVER_HOST: str = "webserver" WEBSERVER_PORT: PortInt = DEFAULT_AIOHTTP_PORT - WEBSERVER_VTAG: VersionTag = parse_obj_as(VersionTag, "v0") + WEBSERVER_VTAG: VersionTag = "v0" @cached_property def base_url(self) -> str: diff --git a/packages/settings-library/tests/conftest.py b/packages/settings-library/tests/conftest.py index 725f19c534a..0431a6c6748 100644 --- a/packages/settings-library/tests/conftest.py +++ b/packages/settings-library/tests/conftest.py @@ -96,9 +96,13 @@ class _ApplicationSettings(BaseCustomSettings): # NOTE: by convention, an addon is disabled when APP_ADDON=None, so we make this # entry nullable as well - APP_OPTIONAL_ADDON: _ModuleSettings | None = Field(auto_default_from_env=True) + APP_OPTIONAL_ADDON: _ModuleSettings | None = Field( + json_schema_extra={"auto_default_from_env": True} + ) # NOTE: example of a group that cannot be disabled (not nullable) - APP_REQUIRED_PLUGIN: PostgresSettings | None = Field(auto_default_from_env=True) + APP_REQUIRED_PLUGIN: PostgresSettings | None = Field( + json_schema_extra={"auto_default_from_env": True} + ) return _ApplicationSettings From 6aaaea1ca22f03e05dafe725d24be8a2263e2599 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 25 Sep 2024 15:45:28 +0200 Subject: [PATCH 099/280] continue upgrading --- packages/settings-library/src/settings_library/r_clone.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/settings-library/src/settings_library/r_clone.py b/packages/settings-library/src/settings_library/r_clone.py index ff04d509bef..c4288466928 100644 --- a/packages/settings-library/src/settings_library/r_clone.py +++ b/packages/settings-library/src/settings_library/r_clone.py @@ -13,7 +13,7 @@ class S3Provider(StrEnum): class RCloneSettings(BaseCustomSettings): - R_CLONE_S3: S3Settings = Field(auto_default_from_env=True) + R_CLONE_S3: S3Settings = Field(json_schema_extra={"auto_default_from_env": True}) R_CLONE_PROVIDER: S3Provider # SEE https://rclone.org/docs/#transfers-n From 4ddddc993bf1fec8c5e87d2d1c4e6d0a38aee3de Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 25 Sep 2024 16:10:18 +0200 Subject: [PATCH 100/280] fix AnyUrl build --- .../src/settings_library/utils_service.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/packages/settings-library/src/settings_library/utils_service.py 
b/packages/settings-library/src/settings_library/utils_service.py index 6d269785475..21d22a7173d 100644 --- a/packages/settings-library/src/settings_library/utils_service.py +++ b/packages/settings-library/src/settings_library/utils_service.py @@ -104,9 +104,9 @@ def _compose_url( else "http" ), "host": self._safe_getattr(f"{prefix}_HOST", URLPart.REQUIRED), + "port": int(port_value) if port_value is not None else None, "username": self._safe_getattr(f"{prefix}_USER", user), "password": self._safe_getattr(f"{prefix}_PASSWORD", password), - "port": int(port_value) if port_value is not None else None, } if vtag != URLPart.EXCLUDE: # noqa: SIM102 @@ -116,15 +116,17 @@ def _compose_url( # post process parts dict kwargs = {} for k, v in parts.items(): - value = v if isinstance(v, SecretStr): value = v.get_secret_value() - elif v is not None: - value = f"{v}" + else: + value = v - kwargs[k] = value + if value is not None: + kwargs[k] = value - assert all(isinstance(v, str) or v is None for v in kwargs.values()) # nosec + assert all( + isinstance(v, (str, int)) or v is None for v in kwargs.values() + ) # nosec composed_url: str = str(AnyUrl.build(**kwargs)) # type: ignore[arg-type] return composed_url From 8120fff00d24d44324c1d890005292f359f7aa38 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 25 Sep 2024 16:51:49 +0200 Subject: [PATCH 101/280] fix field_info --- .../src/settings_library/utils_cli.py | 22 +++++++++---------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/packages/settings-library/src/settings_library/utils_cli.py b/packages/settings-library/src/settings_library/utils_cli.py index 844dffc347c..b8ea4fd098d 100644 --- a/packages/settings-library/src/settings_library/utils_cli.py +++ b/packages/settings-library/src/settings_library/utils_cli.py @@ -24,14 +24,15 @@ def print_as_envfile( ): exclude_unset = pydantic_export_options.get("exclude_unset", False) - for field in settings_obj.__fields__.values(): - auto_default_from_env = field.field_info.extra.get( - "auto_default_from_env", False - ) + for name, field in settings_obj.model_fields.items(): + auto_default_from_env = ( + field.json_schema_extra is not None + and field.json_schema_extra.get("auto_default_from_env", False) + ) # type: ignore[union-attr] - value = getattr(settings_obj, field.name) + value = getattr(settings_obj, name) - if exclude_unset and field.name not in settings_obj.__fields_set__: + if exclude_unset and name not in settings_obj.model_fields_set: if not auto_default_from_env: continue if value is None: @@ -42,7 +43,7 @@ def print_as_envfile( value = f"'{value.model_dump_json(**pydantic_export_options)}'" # flat else: if verbose: - typer.echo(f"\n# --- {field.name} --- ") + typer.echo(f"\n# --- {name} --- ") print_as_envfile( value, compact=False, @@ -55,11 +56,10 @@ def print_as_envfile( value = value.get_secret_value() if verbose: - field_info = field.field_info - if field_info.description: - typer.echo(f"# {field_info.description}") + if field.description: + typer.echo(f"# {field.description}") - typer.echo(f"{field.name}={value}") + typer.echo(f"{name}={value}") def print_as_json(settings_obj, *, compact=False, **pydantic_export_options): From a28234decf3a4320d4c05d0b15decbb59da0e5e6 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 25 Sep 2024 19:15:44 +0200 Subject: [PATCH 102/280] fix utils service --- .../settings-library/src/settings_library/utils_service.py | 4 ++-- packages/settings-library/tests/test_utils_service.py | 6 +++--- 2 files changed, 5 insertions(+), 5 
deletions(-) diff --git a/packages/settings-library/src/settings_library/utils_service.py b/packages/settings-library/src/settings_library/utils_service.py index 21d22a7173d..0c5c22ce636 100644 --- a/packages/settings-library/src/settings_library/utils_service.py +++ b/packages/settings-library/src/settings_library/utils_service.py @@ -111,7 +111,7 @@ def _compose_url( if vtag != URLPart.EXCLUDE: # noqa: SIM102 if v := self._safe_getattr(f"{prefix}_VTAG", vtag): - parts["path"] = f"/{v}" + parts["path"] = f"{v}" # post process parts dict kwargs = {} @@ -129,7 +129,7 @@ def _compose_url( ) # nosec composed_url: str = str(AnyUrl.build(**kwargs)) # type: ignore[arg-type] - return composed_url + return composed_url.rstrip("/") def _build_api_base_url(self, *, prefix: str) -> str: return self._compose_url( diff --git a/packages/settings-library/tests/test_utils_service.py b/packages/settings-library/tests/test_utils_service.py index ab6a4a5db6f..8ecd9835893 100644 --- a/packages/settings-library/tests/test_utils_service.py +++ b/packages/settings-library/tests/test_utils_service.py @@ -24,9 +24,9 @@ class MySettings(BaseCustomSettings, MixinServiceSettings): MY_VTAG: VersionTag | None = None MY_SECURE: bool = False - # optional - MY_USER: str | None - MY_PASSWORD: SecretStr | None + # optional (in Pydantic v2 requires a default) + MY_USER: str | None = None + MY_PASSWORD: SecretStr | None = None @cached_property def api_base_url(self) -> str: From da97728aa8da9ce791d49963ae235f0000f3136c Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 25 Sep 2024 19:23:27 +0200 Subject: [PATCH 103/280] fix mypy --- .../src/settings_library/base.py | 6 ++++-- .../src/settings_library/docker_registry.py | 5 +++-- .../src/settings_library/prometheus.py | 20 ++++++++++--------- .../src/settings_library/utils_cli.py | 7 +++---- 4 files changed, 21 insertions(+), 17 deletions(-) diff --git a/packages/settings-library/src/settings_library/base.py b/packages/settings-library/src/settings_library/base.py index d4f9d28e3d3..4e8e85e996b 100644 --- a/packages/settings-library/src/settings_library/base.py +++ b/packages/settings-library/src/settings_library/base.py @@ -102,8 +102,10 @@ def __pydantic_init_subclass__(cls, **kwargs: Any): for name, field in cls.model_fields.items(): auto_default_from_env = ( field.json_schema_extra is not None - and field.json_schema_extra.get("auto_default_from_env", False) - ) # type: ignore[union-attr] + and field.json_schema_extra.get( # type: ignore[union-attr] + "auto_default_from_env", False + ) + ) field_type = _get_type(field) # Avoids issubclass raising TypeError. 
SEE test_issubclass_type_error_with_pydantic_models diff --git a/packages/settings-library/src/settings_library/docker_registry.py b/packages/settings-library/src/settings_library/docker_registry.py index aa41f74106c..e899ce45718 100644 --- a/packages/settings-library/src/settings_library/docker_registry.py +++ b/packages/settings-library/src/settings_library/docker_registry.py @@ -1,7 +1,8 @@ from functools import cached_property from typing import Any -from pydantic import ConfigDict, Field, SecretStr, field_validator +from pydantic import Field, SecretStr, field_validator +from pydantic_settings import SettingsConfigDict from .base import BaseCustomSettings @@ -36,7 +37,7 @@ def resolved_registry_url(self) -> str: def api_url(self) -> str: return f"{self.REGISTRY_URL}/v2" - model_config = ConfigDict( + model_config = SettingsConfigDict( json_schema_extra={ "examples": [ { diff --git a/packages/settings-library/src/settings_library/prometheus.py b/packages/settings-library/src/settings_library/prometheus.py index 065c7e930f0..bee0399baf2 100644 --- a/packages/settings-library/src/settings_library/prometheus.py +++ b/packages/settings-library/src/settings_library/prometheus.py @@ -24,14 +24,16 @@ def origin(self) -> str: @cached_property def api_url(self) -> str: assert self.PROMETHEUS_URL.host # nosec - prometheus_url: str = AnyUrl.build( - scheme=self.PROMETHEUS_URL.scheme, - user=self.PROMETHEUS_USERNAME, - password=self.PROMETHEUS_PASSWORD.get_secret_value() - if self.PROMETHEUS_PASSWORD - else None, - host=self.PROMETHEUS_URL.host, - port=self.PROMETHEUS_URL.port, - path=self.PROMETHEUS_URL.path, + prometheus_url: str = str( + AnyUrl.build( + scheme=self.PROMETHEUS_URL.scheme, + username=self.PROMETHEUS_USERNAME, + password=self.PROMETHEUS_PASSWORD.get_secret_value() + if self.PROMETHEUS_PASSWORD + else None, + host=self.PROMETHEUS_URL.host, + port=self.PROMETHEUS_URL.port, + path=self.PROMETHEUS_URL.path, + ) ) return prometheus_url diff --git a/packages/settings-library/src/settings_library/utils_cli.py b/packages/settings-library/src/settings_library/utils_cli.py index b8ea4fd098d..94576dcc4ac 100644 --- a/packages/settings-library/src/settings_library/utils_cli.py +++ b/packages/settings-library/src/settings_library/utils_cli.py @@ -25,10 +25,9 @@ def print_as_envfile( exclude_unset = pydantic_export_options.get("exclude_unset", False) for name, field in settings_obj.model_fields.items(): - auto_default_from_env = ( - field.json_schema_extra is not None - and field.json_schema_extra.get("auto_default_from_env", False) - ) # type: ignore[union-attr] + auto_default_from_env = field.json_schema_extra is not None and field.json_schema_extra.get( + "auto_default_from_env", False # type: ignore[union-attr] + ) value = getattr(settings_obj, name) From 06912beb01d8624796e0c8c60aa864c11191f057 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 25 Sep 2024 21:13:23 +0200 Subject: [PATCH 104/280] set optional --- packages/settings-library/tests/test_utils_logging.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/settings-library/tests/test_utils_logging.py b/packages/settings-library/tests/test_utils_logging.py index 47c2f316505..0ca2315e786 100644 --- a/packages/settings-library/tests/test_utils_logging.py +++ b/packages/settings-library/tests/test_utils_logging.py @@ -14,7 +14,7 @@ def test_mixin_logging(monkeypatch): class Settings(BaseCustomSettings, MixinLoggingSettings): # DOCKER - SC_BOOT_MODE: BootMode | None + SC_BOOT_MODE: BootMode | None = None 
# LOGGING LOG_LEVEL: str = Field( From a0d6f84345ec94b50e1ff04a5b1cb60612d7a15e Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 25 Sep 2024 21:15:16 +0200 Subject: [PATCH 105/280] set optional --- packages/settings-library/src/settings_library/node_ports.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/settings-library/src/settings_library/node_ports.py b/packages/settings-library/src/settings_library/node_ports.py index e37ff74c383..8dddc51a192 100644 --- a/packages/settings-library/src/settings_library/node_ports.py +++ b/packages/settings-library/src/settings_library/node_ports.py @@ -11,8 +11,8 @@ class StorageAuthSettings(StorageSettings): - STORAGE_USERNAME: str | None - STORAGE_PASSWORD: SecretStr | None + STORAGE_USERNAME: str | None = None + STORAGE_PASSWORD: SecretStr | None = None STORAGE_SECURE: bool = False @property From 8e62f69943de8f450a8bd9e57410ffa152e5e037 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 25 Sep 2024 21:50:16 +0200 Subject: [PATCH 106/280] continue upgrading --- packages/settings-library/src/settings_library/node_ports.py | 4 ++-- packages/settings-library/src/settings_library/postgres.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/settings-library/src/settings_library/node_ports.py b/packages/settings-library/src/settings_library/node_ports.py index 8dddc51a192..522fcdd0991 100644 --- a/packages/settings-library/src/settings_library/node_ports.py +++ b/packages/settings-library/src/settings_library/node_ports.py @@ -24,8 +24,8 @@ def auth_required(self) -> bool: @model_validator(mode="after") @classmethod def _validate_auth_fields(cls, values): - username = values["STORAGE_USERNAME"] - password = values["STORAGE_PASSWORD"] + username = values.STORAGE_USERNAME + password = values.STORAGE_PASSWORD if (username is None) != (password is None): msg = f"Both {username=} and {password=} must be either set or unset!" 
raise ValueError(msg) diff --git a/packages/settings-library/src/settings_library/postgres.py b/packages/settings-library/src/settings_library/postgres.py index c15740f068b..d58fc462d98 100644 --- a/packages/settings-library/src/settings_library/postgres.py +++ b/packages/settings-library/src/settings_library/postgres.py @@ -63,7 +63,7 @@ def dsn(self) -> str: password=self.POSTGRES_PASSWORD.get_secret_value(), host=self.POSTGRES_HOST, port=self.POSTGRES_PORT, - path=f"/{self.POSTGRES_DB}", + path=f"{self.POSTGRES_DB}", ) ) return dsn @@ -77,7 +77,7 @@ def dsn_with_async_sqlalchemy(self) -> str: password=self.POSTGRES_PASSWORD.get_secret_value(), host=self.POSTGRES_HOST, port=self.POSTGRES_PORT, - path=f"/{self.POSTGRES_DB}", + path=f"{self.POSTGRES_DB}", ) ) return dsn From e3510cd1476627ad963943640da07b2dd0b0b8ac Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 25 Sep 2024 22:13:30 +0200 Subject: [PATCH 107/280] continue upgrading --- .../tests/test_service_settings_labels.py | 18 +++++++++--------- .../tests/test_user_preferences.py | 2 +- .../src/settings_library/utils_cli.py | 2 +- .../settings-library/tests/test_utils_cli.py | 4 ++-- .../simcore_sdk/node_ports_common/r_clone.py | 2 +- 5 files changed, 14 insertions(+), 14 deletions(-) diff --git a/packages/models-library/tests/test_service_settings_labels.py b/packages/models-library/tests/test_service_settings_labels.py index 0ae7824ac8a..287e3d5614b 100644 --- a/packages/models-library/tests/test_service_settings_labels.py +++ b/packages/models-library/tests/test_service_settings_labels.py @@ -126,7 +126,7 @@ def test_path_mappings_json_encoding(): path_mappings = PathMappingsLabel.model_validate(example) print(path_mappings) assert ( - PathMappingsLabel.parse_raw(path_mappings.model_dump_json()) + PathMappingsLabel.model_validate_json(path_mappings.model_dump_json()) == path_mappings ) @@ -262,7 +262,7 @@ def test_container_outgoing_permit_list_and_container_allow_internet_with_compos "simcore.service.container-http-entrypoint": container_name_1, } - instance = DynamicSidecarServiceLabels.parse_raw(json.dumps(dict_data)) + instance = DynamicSidecarServiceLabels.model_validate_json(json.dumps(dict_data)) assert ( instance.containers_allowed_outgoing_permit_list[container_name_1][0] == expected_host_permit_list_policy @@ -291,7 +291,7 @@ def test_container_outgoing_permit_list_and_container_allow_internet_without_com ) }, ): - assert DynamicSidecarServiceLabels.parse_raw(json.dumps(dict_data)) + assert TypeAdapter(DynamicSidecarServiceLabels).validate_json(json.dumps(dict_data)) def test_container_allow_internet_no_compose_spec_not_ok(): @@ -299,7 +299,7 @@ def test_container_allow_internet_no_compose_spec_not_ok(): "simcore.service.containers-allowed-outgoing-internet": json.dumps(["hoho"]), } with pytest.raises(ValidationError) as exec_info: - assert DynamicSidecarServiceLabels.parse_raw(json.dumps(dict_data)) + assert DynamicSidecarServiceLabels.model_validate_json(json.dumps(dict_data)) assert "Expected only 1 entry 'container' not '{'hoho'}" in f"{exec_info.value}" @@ -312,7 +312,7 @@ def test_container_allow_internet_compose_spec_not_ok(): "simcore.service.containers-allowed-outgoing-internet": json.dumps(["hoho"]), } with pytest.raises(ValidationError) as exec_info: - assert DynamicSidecarServiceLabels.parse_raw(json.dumps(dict_data)) + assert DynamicSidecarServiceLabels.model_validate_json(json.dumps(dict_data)) assert f"container='hoho' not found in {compose_spec=}" in f"{exec_info.value}" @@ -331,7 +331,7 @@ def 
test_container_outgoing_permit_list_no_compose_spec_not_ok(): ), } with pytest.raises(ValidationError) as exec_info: - assert DynamicSidecarServiceLabels.parse_raw(json.dumps(dict_data)) + assert DynamicSidecarServiceLabels.model_validate_json(json.dumps(dict_data)) assert ( f"Expected only one entry '{DEFAULT_SINGLE_SERVICE_NAME}' not 'container_name'" in f"{exec_info.value}" @@ -355,7 +355,7 @@ def test_container_outgoing_permit_list_compose_spec_not_ok(): "simcore.service.compose-spec": json.dumps(compose_spec), } with pytest.raises(ValidationError) as exec_info: - assert DynamicSidecarServiceLabels.parse_raw(json.dumps(dict_data)) + assert DynamicSidecarServiceLabels.model_validate_json(json.dumps(dict_data)) assert ( f"Trying to permit list container='container_name' which was not found in {compose_spec=}" in f"{exec_info.value}" @@ -378,7 +378,7 @@ def test_not_allowed_in_both_permit_list_and_outgoing_internet(): } with pytest.raises(ValidationError) as exec_info: - DynamicSidecarServiceLabels.parse_raw(json.dumps(dict_data)) + DynamicSidecarServiceLabels.model_validate_json(json.dumps(dict_data)) assert ( f"Not allowed common_containers={{'{container_name}'}} detected" @@ -610,4 +610,4 @@ def test_user_preferences_path_is_part_of_exiting_volume(): ), } with pytest.raises(ValidationError, match="user_preferences_path=/tmp/outputs"): - assert DynamicSidecarServiceLabels.parse_raw(json.dumps(labels_data)) + assert DynamicSidecarServiceLabels.model_validate_json(json.dumps(labels_data)) diff --git a/packages/models-library/tests/test_user_preferences.py b/packages/models-library/tests/test_user_preferences.py index f90d2cd482a..edac734f0c7 100644 --- a/packages/models-library/tests/test_user_preferences.py +++ b/packages/models-library/tests/test_user_preferences.py @@ -128,7 +128,7 @@ def test__user_service__user_preference( # NOTE: these will be stored as bytes, # check bytes serialization/deserialization pref1_as_bytes = pref1.model_dump_json().encode() - new_instance = UserServiceUserPreference.parse_raw(pref1_as_bytes) + new_instance = UserServiceUserPreference.model_validate_json(pref1_as_bytes) assert new_instance == pref1 diff --git a/packages/settings-library/src/settings_library/utils_cli.py b/packages/settings-library/src/settings_library/utils_cli.py index 94576dcc4ac..fa8e7e993e6 100644 --- a/packages/settings-library/src/settings_library/utils_cli.py +++ b/packages/settings-library/src/settings_library/utils_cli.py @@ -63,7 +63,7 @@ def print_as_envfile( def print_as_json(settings_obj, *, compact=False, **pydantic_export_options): typer.echo( - settings_obj.json(indent=None if compact else 2, **pydantic_export_options) + settings_obj.model_dump_json(indent=None if compact else 2, **pydantic_export_options) ) diff --git a/packages/settings-library/tests/test_utils_cli.py b/packages/settings-library/tests/test_utils_cli.py index 611ccf2509f..9eb621a5bd5 100644 --- a/packages/settings-library/tests/test_utils_cli.py +++ b/packages/settings-library/tests/test_utils_cli.py @@ -85,7 +85,7 @@ def fake_granular_env_file_content() -> str: def export_as_dict() -> Callable: def _export(model_obj, **export_options): return json.loads( - model_obj.json( + model_obj.model_dump_json( encoder=create_json_encoder_wo_secrets(model_obj.__class__), **export_options, ) @@ -136,7 +136,7 @@ def test_settings_as_json( # reuse resulting json to build settings settings: dict = json.loads(result.stdout) - assert fake_settings_class.parse_obj(settings) + assert 
fake_settings_class.model_validate(settings) def test_settings_as_json_schema( diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone.py index 90727e0dd21..fbbcf86d433 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone.py @@ -167,7 +167,7 @@ async def _get_folder_size( cwd=f"{local_dir.resolve()}", ) - rclone_folder_size_result = _RCloneSize.parse_raw(result) + rclone_folder_size_result = _RCloneSize.model_validate_json(result) _logger.debug( "RClone size call for %s: %s", f"{folder}", f"{rclone_folder_size_result}" ) From 3e8ca5d54be2304b03026ec0c2fff67c7169d3b7 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 25 Sep 2024 23:19:22 +0200 Subject: [PATCH 108/280] continue upgrading --- packages/settings-library/tests/test_utils_logging.py | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/packages/settings-library/tests/test_utils_logging.py b/packages/settings-library/tests/test_utils_logging.py index 0ca2315e786..d7f4cf31569 100644 --- a/packages/settings-library/tests/test_utils_logging.py +++ b/packages/settings-library/tests/test_utils_logging.py @@ -40,14 +40,10 @@ def _v(cls, value) -> str: assert settings.LOG_LEVEL == "DEBUG" assert ( - settings.json() - == '{"SC_BOOT_MODE": null, "LOG_LEVEL": "DEBUG", "APPNAME_DEBUG": false}' + settings.model_dump_json() + == '{"SC_BOOT_MODE":null,"LOG_LEVEL":"DEBUG","APPNAME_DEBUG":false}' ) # test cached-property assert settings.log_level == logging.DEBUG - # log_level is cached-property (notice that is lower-case!), and gets added after first use - assert ( - settings.json() - == '{"SC_BOOT_MODE": null, "LOG_LEVEL": "DEBUG", "APPNAME_DEBUG": false, "log_level": 10}' - ) + From fffb8125bba580cae1159cb722af77d9b17e72c0 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 25 Sep 2024 23:58:37 +0200 Subject: [PATCH 109/280] fix cached property --- packages/settings-library/tests/test_postgres.py | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/packages/settings-library/tests/test_postgres.py b/packages/settings-library/tests/test_postgres.py index 1708acc7808..fdde00f55cd 100644 --- a/packages/settings-library/tests/test_postgres.py +++ b/packages/settings-library/tests/test_postgres.py @@ -17,15 +17,12 @@ def test_cached_property_dsn(mock_environment: dict): settings = PostgresSettings() # all are upper-case - assert all(key == key.upper() for key in settings.dict()) - - # dsn is computed from the other fields - assert "dsn" not in settings.dict() - - # causes cached property to be computed and stored on the instance + assert all(key == key.upper() for key in settings.model_dump()) + assert settings.dsn - assert "dsn" in settings.dict() + # dsn is computed from the other fields + assert "dsn" not in settings.model_dump() def test_dsn_with_query(mock_environment: dict, monkeypatch): From e5f9263ff5fdc19a8dc0d4adad75395eb842d427 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 26 Sep 2024 00:01:07 +0200 Subject: [PATCH 110/280] remove pytest.mark --- packages/settings-library/tests/test_base.py | 1 - 1 file changed, 1 deletion(-) diff --git a/packages/settings-library/tests/test_base.py b/packages/settings-library/tests/test_base.py index b11a01f5542..3344aa6b35a 100644 --- a/packages/settings-library/tests/test_base.py +++ b/packages/settings-library/tests/test_base.py @@ -119,7 +119,6 @@ 
def test_create_settings_class( M.model_fields["VALUE_DEFAULT_ENV"].get_default(call_default_factory=True) -@pytest.mark.testit def test_create_settings_class_with_environment( monkeypatch: pytest.MonkeyPatch, create_settings_class: Callable[[str], type[BaseCustomSettings]], From bc208a29fcc7a30bce29f81736dd30b0ce0a41b1 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 26 Sep 2024 09:45:28 +0200 Subject: [PATCH 111/280] fix mypy --- packages/settings-library/src/settings_library/utils_cli.py | 2 +- packages/settings-library/src/settings_library/utils_service.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/settings-library/src/settings_library/utils_cli.py b/packages/settings-library/src/settings_library/utils_cli.py index fa8e7e993e6..fa4a7e9a36b 100644 --- a/packages/settings-library/src/settings_library/utils_cli.py +++ b/packages/settings-library/src/settings_library/utils_cli.py @@ -26,7 +26,7 @@ def print_as_envfile( for name, field in settings_obj.model_fields.items(): auto_default_from_env = field.json_schema_extra is not None and field.json_schema_extra.get( - "auto_default_from_env", False # type: ignore[union-attr] + "auto_default_from_env", False ) value = getattr(settings_obj, name) diff --git a/packages/settings-library/src/settings_library/utils_service.py b/packages/settings-library/src/settings_library/utils_service.py index 0c5c22ce636..17746487a6f 100644 --- a/packages/settings-library/src/settings_library/utils_service.py +++ b/packages/settings-library/src/settings_library/utils_service.py @@ -115,7 +115,7 @@ def _compose_url( # post process parts dict kwargs = {} - for k, v in parts.items(): + for k, v in parts.items(): # type: ignore[assignment] if isinstance(v, SecretStr): value = v.get_secret_value() else: From 85eb86fc9309bb8f7d0b6fdff4fcec386cfb9d11 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 26 Sep 2024 12:19:37 +0200 Subject: [PATCH 112/280] fix parse none env --- packages/settings-library/src/settings_library/base.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/settings-library/src/settings_library/base.py b/packages/settings-library/src/settings_library/base.py index 4e8e85e996b..761b246ff94 100644 --- a/packages/settings-library/src/settings_library/base.py +++ b/packages/settings-library/src/settings_library/base.py @@ -82,7 +82,7 @@ def _parse_none(cls, v, info: ValidationInfo): info.field_name and _allows_none(cls.model_fields[info.field_name]) and isinstance(v, str) - and v.lower() in ("null", "none") + and v.lower() in ("none",) ): return None return v @@ -93,6 +93,7 @@ def _parse_none(cls, v, info: ValidationInfo): frozen=True, validate_default=True, ignored_types=(cached_property,), + env_parse_none_str="null", ) @classmethod From 476e53820b7610a85e7b3c87c667f62727a46817 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 26 Sep 2024 12:50:05 +0200 Subject: [PATCH 113/280] fix env list parsing --- packages/settings-library/src/settings_library/twilio.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/settings-library/src/settings_library/twilio.py b/packages/settings-library/src/settings_library/twilio.py index aac1d9028e0..429fabd2b73 100644 --- a/packages/settings-library/src/settings_library/twilio.py +++ b/packages/settings-library/src/settings_library/twilio.py @@ -8,7 +8,7 @@ from typing import Annotated, TypeAlias -from pydantic import Field, StringConstraints, TypeAdapter +from pydantic import Field, StringConstraints, 
TypeAdapter, field_validator from .base import BaseCustomSettings @@ -30,6 +30,13 @@ class TwilioSettings(BaseCustomSettings): "See https://support.twilio.com/hc/en-us/articles/223133767-International-support-for-Alphanumeric-Sender-ID", ) + + @field_validator("TWILIO_COUNTRY_CODES_W_ALPHANUMERIC_SID_SUPPORT", mode="before") + @classmethod + def _parse_country_codes_from_env(cls, v): + return [str(cc) for cc in v] + + def is_alphanumeric_supported(self, phone_number: str) -> bool: # Some countries do not support alphanumeric serder ID # From f28c41455699fc6fdb60a3aa57fcce06879031fe Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 26 Sep 2024 15:04:43 +0200 Subject: [PATCH 114/280] use BeforeValidator for country codes --- .../src/settings_library/twilio.py | 16 ++++++---------- 1 file changed, 6 insertions(+), 10 deletions(-) diff --git a/packages/settings-library/src/settings_library/twilio.py b/packages/settings-library/src/settings_library/twilio.py index 429fabd2b73..b63e35caf61 100644 --- a/packages/settings-library/src/settings_library/twilio.py +++ b/packages/settings-library/src/settings_library/twilio.py @@ -8,13 +8,16 @@ from typing import Annotated, TypeAlias -from pydantic import Field, StringConstraints, TypeAdapter, field_validator +from pydantic import BeforeValidator, Field, StringConstraints, TypeAdapter from .base import BaseCustomSettings - # Based on https://countrycode.org/ -CountryCodeStr: TypeAlias = Annotated[str, StringConstraints(strip_whitespace=True, pattern=r"^\d{1,4}")] +CountryCodeStr: TypeAlias = Annotated[ + str, + BeforeValidator(str), + StringConstraints(strip_whitespace=True, pattern=r"^\d{1,4}"), +] class TwilioSettings(BaseCustomSettings): @@ -30,13 +33,6 @@ class TwilioSettings(BaseCustomSettings): "See https://support.twilio.com/hc/en-us/articles/223133767-International-support-for-Alphanumeric-Sender-ID", ) - - @field_validator("TWILIO_COUNTRY_CODES_W_ALPHANUMERIC_SID_SUPPORT", mode="before") - @classmethod - def _parse_country_codes_from_env(cls, v): - return [str(cc) for cc in v] - - def is_alphanumeric_supported(self, phone_number: str) -> bool: # Some countries do not support alphanumeric serder ID # From 5d4a6a2029f7c5421595fe5c0f0fbe6d73ddf875 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 26 Sep 2024 15:13:50 +0200 Subject: [PATCH 115/280] fix mypy --- .../tests/test_base_w_postgres.py | 40 +++++++++---------- packages/settings-library/tests/test_email.py | 2 +- .../settings-library/tests/test_postgres.py | 6 +-- 3 files changed, 24 insertions(+), 24 deletions(-) diff --git a/packages/settings-library/tests/test_base_w_postgres.py b/packages/settings-library/tests/test_base_w_postgres.py index 4e817dae740..85fd98c7522 100644 --- a/packages/settings-library/tests/test_base_w_postgres.py +++ b/packages/settings-library/tests/test_base_w_postgres.py @@ -170,12 +170,12 @@ def test_parse_from_individual_envs( } s2 = S2() - assert s2.dict(exclude_unset=True) == {} - assert s2.dict() == {"WEBSERVER_POSTGRES_NULLABLE_OPTIONAL": None} + assert s2.model_dump(exclude_unset=True) == {} + assert s2.model_dump() == {"WEBSERVER_POSTGRES_NULLABLE_OPTIONAL": None} s3 = S3() - assert s3.dict(exclude_unset=True) == {} - assert s3.dict() == { + assert s3.model_dump(exclude_unset=True) == {} + assert s3.model_dump() == { "WEBSERVER_POSTGRES_DEFAULT_ENV": { "POSTGRES_HOST": "pg", "POSTGRES_USER": "test", @@ -189,8 +189,8 @@ def test_parse_from_individual_envs( } s4 = S4() - assert s4.dict(exclude_unset=True) == {} - assert s4.dict() == { + 
assert s4.model_dump(exclude_unset=True) == {} + assert s4.model_dump() == { "WEBSERVER_POSTGRES_NULLABLE_DEFAULT_ENV": { "POSTGRES_HOST": "pg", "POSTGRES_USER": "test", @@ -204,8 +204,8 @@ def test_parse_from_individual_envs( } s5 = S5() - assert s5.dict(exclude_unset=True) == {} - assert s5.dict() == {"WEBSERVER_POSTGRES_NULLABLE_DEFAULT_NULL": None} + assert s5.model_dump(exclude_unset=True) == {} + assert s5.model_dump() == {"WEBSERVER_POSTGRES_NULLABLE_DEFAULT_NULL": None} def test_parse_compact_env( @@ -229,7 +229,7 @@ def test_parse_compact_env( # test s1 = S1() - assert s1.dict(exclude_unset=True) == { + assert s1.model_dump(exclude_unset=True) == { "WEBSERVER_POSTGRES": { "POSTGRES_HOST": "pg2", "POSTGRES_USER": "test2", @@ -237,7 +237,7 @@ def test_parse_compact_env( "POSTGRES_DB": "db2", } } - assert s1.dict() == { + assert s1.model_dump() == { "WEBSERVER_POSTGRES": { "POSTGRES_HOST": "pg2", "POSTGRES_USER": "test2", @@ -258,7 +258,7 @@ def test_parse_compact_env( """, ) s2 = S2() - assert s2.dict(exclude_unset=True) == { + assert s2.model_dump(exclude_unset=True) == { "WEBSERVER_POSTGRES_NULLABLE_OPTIONAL": { "POSTGRES_HOST": "pg2", "POSTGRES_USER": "test2", @@ -278,7 +278,7 @@ def test_parse_compact_env( # default until it is really needed. Here before it would # fail because default cannot be computed even if the final value can! s3 = S3() - assert s3.dict(exclude_unset=True) == { + assert s3.model_dump(exclude_unset=True) == { "WEBSERVER_POSTGRES_DEFAULT_ENV": { "POSTGRES_HOST": "pg2", "POSTGRES_USER": "test2", @@ -295,7 +295,7 @@ def test_parse_compact_env( """, ) s4 = S4() - assert s4.dict(exclude_unset=True) == { + assert s4.model_dump(exclude_unset=True) == { "WEBSERVER_POSTGRES_NULLABLE_DEFAULT_ENV": { "POSTGRES_HOST": "pg2", "POSTGRES_USER": "test2", @@ -312,7 +312,7 @@ def test_parse_compact_env( """, ) s5 = S5() - assert s5.dict(exclude_unset=True) == { + assert s5.model_dump(exclude_unset=True) == { "WEBSERVER_POSTGRES_NULLABLE_DEFAULT_NULL": { "POSTGRES_HOST": "pg2", "POSTGRES_USER": "test2", @@ -348,7 +348,7 @@ def test_parse_from_mixed_envs( s1 = S1() - assert s1.dict() == { + assert s1.model_dump() == { "WEBSERVER_POSTGRES": { "POSTGRES_HOST": "pg2", "POSTGRES_USER": "test2", @@ -363,7 +363,7 @@ def test_parse_from_mixed_envs( # NOTE how unset marks also applies to embedded fields # NOTE: (1) priority of json-compact over granulated # NOTE: (2) json-compact did not define this but granulated did - assert s1.dict(exclude_unset=True) == { + assert s1.model_dump(exclude_unset=True) == { "WEBSERVER_POSTGRES": { "POSTGRES_HOST": "pg2", # <- (1) "POSTGRES_USER": "test2", # <- (1) @@ -380,7 +380,7 @@ def test_parse_from_mixed_envs( ) s2 = S2() - assert s2.dict(exclude_unset=True) == { + assert s2.model_dump(exclude_unset=True) == { "WEBSERVER_POSTGRES_NULLABLE_OPTIONAL": { "POSTGRES_HOST": "pg2", "POSTGRES_USER": "test2", @@ -397,7 +397,7 @@ def test_parse_from_mixed_envs( ) s3 = S3() - assert s3.dict(exclude_unset=True) == { + assert s3.model_dump(exclude_unset=True) == { "WEBSERVER_POSTGRES_DEFAULT_ENV": { "POSTGRES_HOST": "pg2", "POSTGRES_USER": "test2", @@ -414,7 +414,7 @@ def test_parse_from_mixed_envs( ) s4 = S4() - assert s4.dict(exclude_unset=True) == { + assert s4.model_dump(exclude_unset=True) == { "WEBSERVER_POSTGRES_NULLABLE_DEFAULT_ENV": { "POSTGRES_HOST": "pg2", "POSTGRES_USER": "test2", @@ -431,7 +431,7 @@ def test_parse_from_mixed_envs( ) s5 = S5() - assert s5.dict(exclude_unset=True) == { + assert s5.model_dump(exclude_unset=True) == { 
"WEBSERVER_POSTGRES_NULLABLE_DEFAULT_NULL": { "POSTGRES_HOST": "pg2", "POSTGRES_USER": "test2", diff --git a/packages/settings-library/tests/test_email.py b/packages/settings-library/tests/test_email.py index 1cd3978503e..acb9d607c89 100644 --- a/packages/settings-library/tests/test_email.py +++ b/packages/settings-library/tests/test_email.py @@ -67,7 +67,7 @@ def all_env_devel_undefined( ], ) def test_smtp_configuration_ok(cfg: dict[str, Any], all_env_devel_undefined: None): - assert SMTPSettings.parse_obj(cfg) + assert SMTPSettings.model_validate(cfg) @pytest.mark.parametrize( diff --git a/packages/settings-library/tests/test_postgres.py b/packages/settings-library/tests/test_postgres.py index fdde00f55cd..c191f0ea37e 100644 --- a/packages/settings-library/tests/test_postgres.py +++ b/packages/settings-library/tests/test_postgres.py @@ -14,7 +14,7 @@ def env_file(): def test_cached_property_dsn(mock_environment: dict): - settings = PostgresSettings() + settings = PostgresSettings() # type: ignore[call-arg] # all are upper-case assert all(key == key.upper() for key in settings.model_dump()) @@ -27,7 +27,7 @@ def test_cached_property_dsn(mock_environment: dict): def test_dsn_with_query(mock_environment: dict, monkeypatch): - settings = PostgresSettings() + settings = PostgresSettings() # type: ignore[call-arg] assert not settings.POSTGRES_CLIENT_NAME assert settings.dsn == "postgresql://foo:secret@localhost:5432/foodb" @@ -35,7 +35,7 @@ def test_dsn_with_query(mock_environment: dict, monkeypatch): # now with app monkeypatch.setenv("POSTGRES_CLIENT_NAME", "Some &43 funky name") - settings_with_app = PostgresSettings() + settings_with_app = PostgresSettings() # type: ignore[call-arg] assert settings_with_app.POSTGRES_CLIENT_NAME assert ( From 12834884d5126dd601698b187f0a0c7830e6f889 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 27 Sep 2024 11:59:51 +0200 Subject: [PATCH 116/280] fix utils cli --- .../src/settings_library/utils_cli.py | 65 +++++++++++++++---- .../settings-library/tests/test_utils_cli.py | 11 +--- 2 files changed, 54 insertions(+), 22 deletions(-) diff --git a/packages/settings-library/src/settings_library/utils_cli.py b/packages/settings-library/src/settings_library/utils_cli.py index fa4a7e9a36b..ad281d85e39 100644 --- a/packages/settings-library/src/settings_library/utils_cli.py +++ b/packages/settings-library/src/settings_library/utils_cli.py @@ -1,3 +1,4 @@ +import json import logging import os from collections.abc import Callable @@ -6,12 +7,36 @@ import rich import typer -from pydantic import ValidationError +from pydantic import SecretStr, ValidationError from pydantic_settings import BaseSettings from ._constants import HEADER_STR -from .base import BaseCustomSettings -from .utils_encoders import create_json_encoder_wo_secrets +from .base import BaseCustomSettings, _get_type + + +def model_dump_with_secrets( + settings_obj: BaseSettings, show_secret: bool, **pydantic_export_options +) -> dict[str, Any]: + data = settings_obj.model_dump(**pydantic_export_options) + + for field_name in settings_obj.model_fields: + field_data = data[field_name] + + if isinstance(field_data, SecretStr): + if show_secret: + data[field_name] = field_data.get_secret_value() # Expose the raw value + else: + data[field_name] = str(field_data) + elif isinstance(field_data, dict): + field_type = _get_type(settings_obj.model_fields[field_name]) + if issubclass(field_type, BaseSettings): + data[field_name] = model_dump_with_secrets( + field_type.model_validate(field_data), + 
show_secret, + **pydantic_export_options, + ) + + return data def print_as_envfile( @@ -25,8 +50,9 @@ def print_as_envfile( exclude_unset = pydantic_export_options.get("exclude_unset", False) for name, field in settings_obj.model_fields.items(): - auto_default_from_env = field.json_schema_extra is not None and field.json_schema_extra.get( - "auto_default_from_env", False + auto_default_from_env = ( + field.json_schema_extra is not None + and field.json_schema_extra.get("auto_default_from_env", False) ) value = getattr(settings_obj, name) @@ -39,7 +65,11 @@ def print_as_envfile( if isinstance(value, BaseSettings): if compact: - value = f"'{value.model_dump_json(**pydantic_export_options)}'" # flat + value = json.dumps( + model_dump_with_secrets( + value, show_secret=show_secrets, **pydantic_export_options + ) + ) # flat else: if verbose: typer.echo(f"\n# --- {name} --- ") @@ -61,9 +91,16 @@ def print_as_envfile( typer.echo(f"{name}={value}") -def print_as_json(settings_obj, *, compact=False, **pydantic_export_options): +def print_as_json( + settings_obj, *, compact=False, show_secrets, **pydantic_export_options +): typer.echo( - settings_obj.model_dump_json(indent=None if compact else 2, **pydantic_export_options) + json.dumps( + model_dump_with_secrets( + settings_obj, show_secret=show_secrets, **pydantic_export_options + ), + indent=None if compact else 2, + ) ) @@ -127,14 +164,14 @@ def settings( raise pydantic_export_options: dict[str, Any] = {"exclude_unset": exclude_unset} - if show_secrets: - # NOTE: this option is for json-only - pydantic_export_options["encoder"] = create_json_encoder_wo_secrets( - settings_cls - ) if as_json: - print_as_json(settings_obj, compact=compact, **pydantic_export_options) + print_as_json( + settings_obj, + compact=compact, + show_secrets=show_secrets, + **pydantic_export_options, + ) else: print_as_envfile( settings_obj, diff --git a/packages/settings-library/tests/test_utils_cli.py b/packages/settings-library/tests/test_utils_cli.py index 9eb621a5bd5..2befde10b0e 100644 --- a/packages/settings-library/tests/test_utils_cli.py +++ b/packages/settings-library/tests/test_utils_cli.py @@ -18,10 +18,10 @@ from settings_library.utils_cli import ( create_settings_command, create_version_callback, + model_dump_with_secrets, print_as_envfile, print_as_json, ) -from settings_library.utils_encoders import create_json_encoder_wo_secrets from typer.testing import CliRunner log = logging.getLogger(__name__) @@ -84,12 +84,7 @@ def fake_granular_env_file_content() -> str: @pytest.fixture def export_as_dict() -> Callable: def _export(model_obj, **export_options): - return json.loads( - model_obj.model_dump_json( - encoder=create_json_encoder_wo_secrets(model_obj.__class__), - **export_options, - ) - ) + return model_dump_with_secrets(model_obj, show_secret=True, **export_options) return _export @@ -439,7 +434,7 @@ class FakeSettings(BaseCustomSettings): assert "secret" not in captured.out assert "Some info" not in captured.out - print_as_json(settings_obj, compact=True) + print_as_json(settings_obj, compact=True, show_secrets=False) captured = capsys.readouterr() assert "secret" not in captured.out assert "**" in captured.out From 5cecc63b6400402fed26629c1b290e52c8384f74 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 27 Sep 2024 16:16:28 +0200 Subject: [PATCH 117/280] fix encoder issues --- .../utils/pydantic_fields_extension.py | 23 ++++++++++++ .../src/models_library/utils/serialization.py | 29 +++++++++++++++ .../deferred_tasks/test_deferred_tasks.py | 20 
++++++++-- .../src/settings_library/base.py | 37 ++++++------------- .../src/settings_library/utils_cli.py | 30 ++------------- 5 files changed, 82 insertions(+), 57 deletions(-) create mode 100644 packages/models-library/src/models_library/utils/pydantic_fields_extension.py create mode 100644 packages/models-library/src/models_library/utils/serialization.py diff --git a/packages/models-library/src/models_library/utils/pydantic_fields_extension.py b/packages/models-library/src/models_library/utils/pydantic_fields_extension.py new file mode 100644 index 00000000000..e9701c9efeb --- /dev/null +++ b/packages/models-library/src/models_library/utils/pydantic_fields_extension.py @@ -0,0 +1,23 @@ +from types import UnionType +from typing import Any, Literal, get_args, get_origin + +from pydantic.fields import FieldInfo + + +def get_type(info: FieldInfo) -> Any: + field_type = info.annotation + if args := get_args(info.annotation): + field_type = next(a for a in args if a != type(None)) + return field_type + + +def is_literal(info: FieldInfo) -> bool: + origin = get_origin(info.annotation) + return origin is Literal + + +def is_nullable(info: FieldInfo) -> bool: + origin = get_origin(info.annotation) # X | None or Optional[X] will return Union + if origin is UnionType: + return any(x in get_args(info.annotation) for x in (type(None), Any)) + return False diff --git a/packages/models-library/src/models_library/utils/serialization.py b/packages/models-library/src/models_library/utils/serialization.py new file mode 100644 index 00000000000..adc3ca0b361 --- /dev/null +++ b/packages/models-library/src/models_library/utils/serialization.py @@ -0,0 +1,29 @@ +from typing import Any + +from models_library.utils.pydantic_fields_extension import get_type +from pydantic import BaseModel, SecretStr + + +def model_dump_with_secrets( + settings_obj: BaseModel, show_secrets: bool, **pydantic_export_options +) -> dict[str, Any]: + data = settings_obj.model_dump(**pydantic_export_options) + + for field_name in settings_obj.model_fields: + field_data = data[field_name] + + if isinstance(field_data, SecretStr): + if show_secrets: + data[field_name] = field_data.get_secret_value() # Expose the raw value + else: + data[field_name] = str(field_data) + elif isinstance(field_data, dict): + field_type = get_type(settings_obj.model_fields[field_name]) + if issubclass(field_type, BaseModel): + data[field_name] = model_dump_with_secrets( + field_type.model_validate(field_data), + show_secrets, + **pydantic_export_options, + ) + + return data diff --git a/packages/service-library/tests/deferred_tasks/test_deferred_tasks.py b/packages/service-library/tests/deferred_tasks/test_deferred_tasks.py index ca77d8a1a6b..44a0a9379ed 100644 --- a/packages/service-library/tests/deferred_tasks/test_deferred_tasks.py +++ b/packages/service-library/tests/deferred_tasks/test_deferred_tasks.py @@ -16,6 +16,8 @@ import psutil import pytest from aiohttp.test_utils import unused_port +from models_library.utils.json_serialization import json_dumps +from models_library.utils.serialization import model_dump_with_secrets from pydantic import NonNegativeFloat, NonNegativeInt from pytest_mock import MockerFixture from servicelib import redis as servicelib_redis @@ -24,7 +26,6 @@ from servicelib.sequences_utils import partition_gen from settings_library.rabbit import RabbitSettings from settings_library.redis import RedisSettings -from settings_library.utils_encoders import create_json_encoder_wo_secrets from tenacity.asyncio import AsyncRetrying from 
tenacity.retry import retry_if_exception_type from tenacity.stop import stop_after_delay @@ -123,7 +124,6 @@ async def _tcp_command( def _get_serialization_options() -> dict[str, Any]: return { - "encoder": create_json_encoder_wo_secrets(RabbitSettings), "exclude_defaults": True, "exclude_none": True, "exclude_unset": True, @@ -158,8 +158,20 @@ async def start(self) -> None: response = await _tcp_command( "init-context", { - "rabbit": self.rabbit_service.model_dump_json(**_get_serialization_options()), - "redis": self.redis_service.model_dump_json(**_get_serialization_options()), + "rabbit": json_dumps( + model_dump_with_secrets( + self.rabbit_service, + show_secrets=True, + **_get_serialization_options(), + ) + ), + "redis": json_dumps( + model_dump_with_secrets( + self.redis_service, + show_secrets=True, + **_get_serialization_options(), + ) + ), "max-workers": self.max_workers, }, port=self.remote_process.port, diff --git a/packages/settings-library/src/settings_library/base.py b/packages/settings-library/src/settings_library/base.py index 761b246ff94..97b50bd6e58 100644 --- a/packages/settings-library/src/settings_library/base.py +++ b/packages/settings-library/src/settings_library/base.py @@ -1,8 +1,12 @@ import logging from functools import cached_property -from types import UnionType -from typing import Any, Final, Literal, get_args, get_origin +from typing import Any, Final, get_origin +from models_library.utils.pydantic_fields_extension import ( + get_type, + is_literal, + is_nullable, +) from pydantic import ValidationInfo, field_validator from pydantic.fields import FieldInfo from pydantic_core import PydanticUndefined, ValidationError @@ -21,25 +25,6 @@ def __init__(self, errors): self.errors = errors -def _allows_none(info: FieldInfo) -> bool: - origin = get_origin(info.annotation) # X | None or Optional[X] will return Union - if origin is UnionType: - return any(x in get_args(info.annotation) for x in (type(None), Any)) - return False - - -def _get_type(info: FieldInfo) -> Any: - field_type = info.annotation - if args := get_args(info.annotation): - field_type = next(a for a in args if a != type(None)) - return field_type - - -def _is_literal(info: FieldInfo) -> bool: - origin = get_origin(info.annotation) - return origin is Literal - - def _create_settings_from_env(field_name: str, info: FieldInfo): # NOTE: Cannot pass only field.type_ because @prepare_field (when this function is called) # this value is still not resolved (field.type_ at that moment has a weak_ref). @@ -47,12 +32,12 @@ def _create_settings_from_env(field_name: str, info: FieldInfo): def _default_factory(): """Creates default from sub-settings or None (if nullable)""" - field_settings_cls = _get_type(info) + field_settings_cls = get_type(info) try: return field_settings_cls() except ValidationError as err: - if _allows_none(info): + if is_nullable(info): # e.g. Optional[PostgresSettings] would warn if defaults to None _logger.warning( _DEFAULTS_TO_NONE_MSG, @@ -80,7 +65,7 @@ def _parse_none(cls, v, info: ValidationInfo): # WARNING: In nullable fields, envs equal to null or none are parsed as None !! if ( info.field_name - and _allows_none(cls.model_fields[info.field_name]) + and is_nullable(cls.model_fields[info.field_name]) and isinstance(v, str) and v.lower() in ("none",) ): @@ -107,13 +92,13 @@ def __pydantic_init_subclass__(cls, **kwargs: Any): "auto_default_from_env", False ) ) - field_type = _get_type(field) + field_type = get_type(field) # Avoids issubclass raising TypeError. 
SEE test_issubclass_type_error_with_pydantic_models is_not_composed = ( get_origin(field_type) is None ) # is not composed as dict[str, Any] or Generic[Base] - is_not_literal = not _is_literal(field) + is_not_literal = not is_literal(field) if ( is_not_literal diff --git a/packages/settings-library/src/settings_library/utils_cli.py b/packages/settings-library/src/settings_library/utils_cli.py index ad281d85e39..cfbf34802fe 100644 --- a/packages/settings-library/src/settings_library/utils_cli.py +++ b/packages/settings-library/src/settings_library/utils_cli.py @@ -7,36 +7,12 @@ import rich import typer -from pydantic import SecretStr, ValidationError +from models_library.utils.serialization import model_dump_with_secrets +from pydantic import ValidationError from pydantic_settings import BaseSettings from ._constants import HEADER_STR -from .base import BaseCustomSettings, _get_type - - -def model_dump_with_secrets( - settings_obj: BaseSettings, show_secret: bool, **pydantic_export_options -) -> dict[str, Any]: - data = settings_obj.model_dump(**pydantic_export_options) - - for field_name in settings_obj.model_fields: - field_data = data[field_name] - - if isinstance(field_data, SecretStr): - if show_secret: - data[field_name] = field_data.get_secret_value() # Expose the raw value - else: - data[field_name] = str(field_data) - elif isinstance(field_data, dict): - field_type = _get_type(settings_obj.model_fields[field_name]) - if issubclass(field_type, BaseSettings): - data[field_name] = model_dump_with_secrets( - field_type.model_validate(field_data), - show_secret, - **pydantic_export_options, - ) - - return data +from .base import BaseCustomSettings def print_as_envfile( From cbc556faf5c3d91bedf1fd523af365d877e9c440 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 27 Sep 2024 16:29:54 +0200 Subject: [PATCH 118/280] add requirements --- packages/settings-library/requirements/ci.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/settings-library/requirements/ci.txt b/packages/settings-library/requirements/ci.txt index 9feda17bfaa..d508c9f3352 100644 --- a/packages/settings-library/requirements/ci.txt +++ b/packages/settings-library/requirements/ci.txt @@ -12,6 +12,7 @@ # installs this repo's packages pytest-simcore @ ../pytest-simcore +simcore-models-library @ ../models-library # current module simcore-settings-library @ . 
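NOTE: for context on the helper introduced and wired into the CLI utilities by the last few commits (model_dump_with_secrets, now living in models_library.utils.serialization), below is a minimal usage sketch. The _DemoSettings class is hypothetical and used only for illustration; given the helper as defined in PATCH 117/119, this is the expected behaviour:

    from pydantic import BaseModel, SecretStr
    from models_library.utils.serialization import model_dump_with_secrets

    class _DemoSettings(BaseModel):  # hypothetical model, not part of the codebase
        HOST: str = "localhost"
        PASSWORD: SecretStr = SecretStr("adminadmin")

    # secrets are exposed only when explicitly requested ...
    assert model_dump_with_secrets(_DemoSettings(), show_secrets=True) == {
        "HOST": "localhost",
        "PASSWORD": "adminadmin",
    }
    # ... otherwise the masked string representation produced by SecretStr is kept
    assert model_dump_with_secrets(_DemoSettings(), show_secrets=False) == {
        "HOST": "localhost",
        "PASSWORD": "**********",
    }
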
From 4c27843a2cdc22fedd8d6f8630881724cf28c827 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 27 Sep 2024 16:38:41 +0200 Subject: [PATCH 119/280] skip missing data --- .../models-library/src/models_library/utils/serialization.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/packages/models-library/src/models_library/utils/serialization.py b/packages/models-library/src/models_library/utils/serialization.py index adc3ca0b361..d05caf64c56 100644 --- a/packages/models-library/src/models_library/utils/serialization.py +++ b/packages/models-library/src/models_library/utils/serialization.py @@ -10,6 +10,9 @@ def model_dump_with_secrets( data = settings_obj.model_dump(**pydantic_export_options) for field_name in settings_obj.model_fields: + if field_name not in data: + continue + field_data = data[field_name] if isinstance(field_data, SecretStr): From 9c4e07e4ad0e57e20348f09f02695c5fa0a71918 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 27 Sep 2024 16:43:43 +0200 Subject: [PATCH 120/280] fix import --- packages/settings-library/tests/test__pydantic_settings.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/settings-library/tests/test__pydantic_settings.py b/packages/settings-library/tests/test__pydantic_settings.py index c97ff0f7289..4c946d77ff3 100644 --- a/packages/settings-library/tests/test__pydantic_settings.py +++ b/packages/settings-library/tests/test__pydantic_settings.py @@ -15,7 +15,7 @@ from pydantic import ValidationInfo, field_validator from pydantic.fields import PydanticUndefined from pydantic_settings import BaseSettings -from settings_library.base import _allows_none +from models_library.utils.pydantic_fields_extension import is_nullable def assert_field_specs( @@ -29,7 +29,7 @@ def assert_field_specs( print(info) assert info.is_required() == is_required - assert _allows_none(info) == is_nullable + assert is_nullable(info) == is_nullable if info.is_required(): # in this case, default is not really used @@ -55,7 +55,7 @@ class Settings(BaseSettings): @classmethod def parse_none(cls, v, info: ValidationInfo): # WARNING: In nullable fields, envs equal to null or none are parsed as None !! 
- if info.field_name and _allows_none(cls.model_fields[info.field_name]): + if info.field_name and is_nullable(cls.model_fields[info.field_name]): if isinstance(v, str) and v.lower() in ("null", "none"): return None return v From 6c58ae5c12c187c50f2732746edc95fb2d1b6c70 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 27 Sep 2024 17:30:12 +0200 Subject: [PATCH 121/280] fix option name --- packages/settings-library/tests/test_utils_cli.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/settings-library/tests/test_utils_cli.py b/packages/settings-library/tests/test_utils_cli.py index 2befde10b0e..b4e50902124 100644 --- a/packages/settings-library/tests/test_utils_cli.py +++ b/packages/settings-library/tests/test_utils_cli.py @@ -84,7 +84,7 @@ def fake_granular_env_file_content() -> str: @pytest.fixture def export_as_dict() -> Callable: def _export(model_obj, **export_options): - return model_dump_with_secrets(model_obj, show_secret=True, **export_options) + return model_dump_with_secrets(model_obj, show_secrets=True, **export_options) return _export From 76b5cba79c602775157829634a611ce2ace85055 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 27 Sep 2024 18:46:47 +0200 Subject: [PATCH 122/280] continue ugrading --- .../src/settings_library/utils_cli.py | 2 +- .../tests/test__pydantic_settings.py | 36 +++++++++---------- 2 files changed, 19 insertions(+), 19 deletions(-) diff --git a/packages/settings-library/src/settings_library/utils_cli.py b/packages/settings-library/src/settings_library/utils_cli.py index cfbf34802fe..7b7a6b2dd35 100644 --- a/packages/settings-library/src/settings_library/utils_cli.py +++ b/packages/settings-library/src/settings_library/utils_cli.py @@ -43,7 +43,7 @@ def print_as_envfile( if compact: value = json.dumps( model_dump_with_secrets( - value, show_secret=show_secrets, **pydantic_export_options + value, show_secrets=show_secrets, **pydantic_export_options ) ) # flat else: diff --git a/packages/settings-library/tests/test__pydantic_settings.py b/packages/settings-library/tests/test__pydantic_settings.py index 4c946d77ff3..3beeecc6c74 100644 --- a/packages/settings-library/tests/test__pydantic_settings.py +++ b/packages/settings-library/tests/test__pydantic_settings.py @@ -21,15 +21,15 @@ def assert_field_specs( model_cls: type[BaseSettings], name: str, - is_required: bool, - is_nullable: bool, + required: bool, + nullable: bool, explicit_default, ): info = model_cls.model_fields[name] print(info) - assert info.is_required() == is_required - assert is_nullable(info) == is_nullable + assert info.is_required() == required + assert is_nullable(info) == nullable if info.is_required(): # in this case, default is not really used @@ -69,32 +69,32 @@ def test_fields_declarations(): assert_field_specs( Settings, "VALUE", - is_required=True, - is_nullable=False, + required=True, + nullable=False, explicit_default=PydanticUndefined, ) assert_field_specs( Settings, "VALUE_DEFAULT", - is_required=False, - is_nullable=False, + required=False, + nullable=False, explicit_default=42, ) assert_field_specs( Settings, "VALUE_NULLABLE_REQUIRED", - is_required=True, - is_nullable=True, + required=True, + nullable=True, explicit_default=Ellipsis, ) assert_field_specs( Settings, "VALUE_NULLABLE_REQUIRED_AS_WELL", - is_required=True, - is_nullable=True, + required=True, + nullable=True, explicit_default=PydanticUndefined, # <- difference wrt VALUE_NULLABLE_DEFAULT_NULL ) @@ -105,24 +105,24 @@ def test_fields_declarations(): assert_field_specs( 
Settings, "VALUE_NULLABLE_DEFAULT_VALUE", - is_required=False, - is_nullable=True, + required=False, + nullable=True, explicit_default=42, ) assert_field_specs( Settings, "VALUE_NULLABLE_DEFAULT_NULL", - is_required=False, - is_nullable=True, + required=False, + nullable=True, explicit_default=None, ) assert_field_specs( Settings, "VALUE_REQUIRED_AS_WELL", - is_required=True, - is_nullable=False, + required=True, + nullable=False, explicit_default=Ellipsis, ) From 70211178634dfa9abd699fe0256fbdacd984f300 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 27 Sep 2024 19:32:13 +0200 Subject: [PATCH 123/280] firx param name --- packages/settings-library/src/settings_library/utils_cli.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/settings-library/src/settings_library/utils_cli.py b/packages/settings-library/src/settings_library/utils_cli.py index 7b7a6b2dd35..05e0f0ca7bc 100644 --- a/packages/settings-library/src/settings_library/utils_cli.py +++ b/packages/settings-library/src/settings_library/utils_cli.py @@ -73,7 +73,7 @@ def print_as_json( typer.echo( json.dumps( model_dump_with_secrets( - settings_obj, show_secret=show_secrets, **pydantic_export_options + settings_obj, show_secrets=show_secrets, **pydantic_export_options ), indent=None if compact else 2, ) From eae47833d8e427e594115260aad719592ed563c3 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 27 Sep 2024 20:36:34 +0200 Subject: [PATCH 124/280] upgrade requirements --- services/agent/requirements/_base.txt | 35 ++++++++++++++++---------- services/agent/requirements/_test.txt | 19 ++++++++------ services/agent/requirements/_tools.txt | 12 +-------- 3 files changed, 34 insertions(+), 32 deletions(-) diff --git a/services/agent/requirements/_base.txt b/services/agent/requirements/_base.txt index 394611702aa..04f6020c874 100644 --- a/services/agent/requirements/_base.txt +++ b/services/agent/requirements/_base.txt @@ -11,6 +11,8 @@ aiohttp==3.8.5 # aiodocker aiosignal==1.2.0 # via aiohttp +annotated-types==0.7.0 + # via pydantic anyio==3.6.2 # via # httpx @@ -41,16 +43,10 @@ click==8.1.3 # uvicorn dnspython==2.2.1 # via email-validator -email-validator==1.3.0 +email-validator==2.2.0 # via pydantic -fastapi==0.96.0 +fastapi==0.115.0 # via - # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in # prometheus-fastapi-instrumentator @@ -106,12 +102,11 @@ prometheus-client==0.19.0 # prometheus-fastapi-instrumentator prometheus-fastapi-instrumentator==6.1.0 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -pydantic==1.10.2 +pydantic==2.9.2 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt @@ -119,6 +114,16 @@ pydantic==1.10.2 # -r requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/_base.in # fastapi + # pydantic-extra-types + # pydantic-settings +pydantic-core==2.23.4 + # via pydantic +pydantic-extra-types==2.9.0 + # via -r requirements/../../../packages/models-library/requirements/_base.in +pydantic-settings==2.5.2 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in pygments==2.15.1 # via rich pyrsistent==0.19.2 @@ -126,7 +131,9 @@ pyrsistent==0.19.2 python-dateutil==2.8.2 # via arrow python-dotenv==1.0.0 - # via -r requirements/_base.in + # via + # -r requirements/_base.in + # pydantic-settings rich==13.4.2 # via # -r requirements/../../../packages/settings-library/requirements/_base.in @@ -143,7 +150,7 @@ sniffio==1.3.0 # via # anyio # httpx -starlette==0.27.0 +starlette==0.38.6 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -154,10 +161,12 @@ starlette==0.27.0 # fastapi typer==0.12.3 # via -r requirements/../../../packages/settings-library/requirements/_base.in -typing-extensions==4.4.0 +typing-extensions==4.12.2 # via # aiodocker + # fastapi # pydantic + # pydantic-core # typer uvicorn==0.19.0 # via diff --git a/services/agent/requirements/_test.txt b/services/agent/requirements/_test.txt index fd7c0889fce..c3e2c6ee2fc 100644 --- a/services/agent/requirements/_test.txt +++ b/services/agent/requirements/_test.txt @@ -13,6 +13,10 @@ aiosignal==1.2.0 # via # -c requirements/_base.txt # aiohttp +annotated-types==0.7.0 + # via + # -c requirements/_base.txt + # pydantic anyio==3.6.2 # via # -c requirements/_base.txt @@ -81,8 +85,6 @@ ecdsa==0.19.0 # moto # python-jose # sshpubkeys -exceptiongroup==1.2.2 - # via pytest faker==27.0.0 # via -r requirements/_test.in flask==3.0.3 @@ -178,11 +180,15 @@ pyasn1==0.6.0 # rsa pycparser==2.22 # via cffi -pydantic==1.10.2 +pydantic==2.9.2 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # aws-sam-translator +pydantic-core==2.23.4 + # via + # -c requirements/_base.txt + # pydantic pyparsing==3.1.2 # via moto pyrsistent==0.19.2 @@ -257,16 +263,13 @@ sshpubkeys==3.3.1 # via moto sympy==1.13.2 # via cfn-lint -tomli==2.0.1 - # via - # coverage - # pytest -typing-extensions==4.4.0 +typing-extensions==4.12.2 # via # -c requirements/_base.txt # aws-sam-translator # cfn-lint # pydantic + # pydantic-core urllib3==2.2.2 # via # -c requirements/../../../requirements/constraints.txt diff --git a/services/agent/requirements/_tools.txt b/services/agent/requirements/_tools.txt index 
69a9671437c..8c9416c12a5 100644 --- a/services/agent/requirements/_tools.txt +++ b/services/agent/requirements/_tools.txt @@ -77,22 +77,12 @@ setuptools==69.2.0 # -c requirements/_base.txt # -c requirements/_test.txt # pip-tools -tomli==2.0.1 - # via - # -c requirements/_test.txt - # black - # build - # mypy - # pip-tools - # pylint tomlkit==0.13.2 # via pylint -typing-extensions==4.4.0 +typing-extensions==4.12.2 # via # -c requirements/_base.txt # -c requirements/_test.txt - # astroid - # black # mypy virtualenv==20.26.3 # via pre-commit From 798912a6525e672222f01215ad6616f4ce405461 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 27 Sep 2024 20:41:20 +0200 Subject: [PATCH 125/280] run bump-pydantic --- .../src/simcore_service_agent/core/application.py | 2 +- .../agent/src/simcore_service_agent/core/settings.py | 10 +++++----- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/services/agent/src/simcore_service_agent/core/application.py b/services/agent/src/simcore_service_agent/core/application.py index 1c2211b16f3..3ac90922dc1 100644 --- a/services/agent/src/simcore_service_agent/core/application.py +++ b/services/agent/src/simcore_service_agent/core/application.py @@ -38,7 +38,7 @@ def create_app() -> FastAPI: # SETTINGS settings = ApplicationSettings.create_from_envs() _setup_logger(settings) - logger.debug(settings.json(indent=2)) + logger.debug(settings.model_dump_json(indent=2)) assert settings.SC_BOOT_MODE # nosec app = FastAPI( diff --git a/services/agent/src/simcore_service_agent/core/settings.py b/services/agent/src/simcore_service_agent/core/settings.py index 882217f9a5f..47f60862680 100644 --- a/services/agent/src/simcore_service_agent/core/settings.py +++ b/services/agent/src/simcore_service_agent/core/settings.py @@ -1,7 +1,7 @@ from typing import Final from models_library.basic_types import BootModeEnum, LogLevel -from pydantic import AnyHttpUrl, Field, NonNegativeInt, validator +from pydantic import AliasChoices, field_validator, AnyHttpUrl, Field, NonNegativeInt from settings_library.base import BaseCustomSettings from settings_library.r_clone import S3Provider from settings_library.utils_logging import MixinLoggingSettings @@ -11,16 +11,16 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): LOGLEVEL: LogLevel = Field( - LogLevel.WARNING.value, env=["AGENT_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"] + LogLevel.WARNING.value, validation_alias=AliasChoices("AGENT_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL") ) SC_BOOT_MODE: BootModeEnum | None AGENT_VOLUMES_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field( default=False, - env=[ + validation_alias=AliasChoices( "AGENT_VOLUMES_LOG_FORMAT_LOCAL_DEV_ENABLED", "LOG_FORMAT_LOCAL_DEV_ENABLED", - ], + ), description="Enables local development log format. 
WARNING: make sure it is disabled if you want to have structured logs!", ) AGENT_VOLUMES_CLEANUP_TARGET_SWARM_STACK_NAME: str = Field( @@ -47,7 +47,7 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): ) AGENT_PROMETHEUS_INSTRUMENTATION_ENABLED: bool = True - @validator("LOGLEVEL") + @field_validator("LOGLEVEL") @classmethod def valid_log_level(cls, value) -> LogLevel: return LogLevel(cls.validate_log_level(value)) From 9eb451963a83c60bf6661447e69d207b3e04ab8c Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 27 Sep 2024 20:47:48 +0200 Subject: [PATCH 126/280] fix url --- services/agent/tests/conftest.py | 5 ++--- services/agent/tests/unit/test_modules_volumes_cleanup_s3.py | 2 +- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/services/agent/tests/conftest.py b/services/agent/tests/conftest.py index bd0d1a8964f..766e17da2a1 100644 --- a/services/agent/tests/conftest.py +++ b/services/agent/tests/conftest.py @@ -14,7 +14,7 @@ from models_library.basic_types import BootModeEnum from models_library.services import RunID from moto.server import ThreadedMotoServer -from pydantic import HttpUrl, parse_obj_as +from pydantic import HttpUrl, TypeAdapter from settings_library.r_clone import S3Provider from simcore_service_agent.core.settings import ApplicationSettings @@ -189,7 +189,6 @@ def caplog_info_debug( @pytest.fixture(scope="module") def mocked_s3_server_url(mocked_aws_server: ThreadedMotoServer) -> HttpUrl: # pylint: disable=protected-access - return parse_obj_as( - HttpUrl, + return TypeAdapter(HttpUrl).validate_python( f"http://{mocked_aws_server._ip_address}:{mocked_aws_server._port}", # noqa: SLF001 ) diff --git a/services/agent/tests/unit/test_modules_volumes_cleanup_s3.py b/services/agent/tests/unit/test_modules_volumes_cleanup_s3.py index 1728d0bb029..ea7c0f28ecb 100644 --- a/services/agent/tests/unit/test_modules_volumes_cleanup_s3.py +++ b/services/agent/tests/unit/test_modules_volumes_cleanup_s3.py @@ -149,7 +149,7 @@ async def test_store_to_s3( ) await _download_files_from_bucket( - endpoint=mocked_s3_server_url, + endpoint=str(mocked_s3_server_url), access_key="xxx", secret_key="xxx", bucket_name=bucket, From 6cbe7d977be3a69c75e7b6009826344d74de72d6 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 27 Sep 2024 21:03:48 +0200 Subject: [PATCH 127/280] ugprade requirements --- services/invitations/requirements/_base.txt | 42 +++++++++++--------- services/invitations/requirements/_test.txt | 14 ------- services/invitations/requirements/_tools.txt | 11 ----- 3 files changed, 23 insertions(+), 44 deletions(-) diff --git a/services/invitations/requirements/_base.txt b/services/invitations/requirements/_base.txt index 68a381d3f12..413ac0b5d35 100644 --- a/services/invitations/requirements/_base.txt +++ b/services/invitations/requirements/_base.txt @@ -19,6 +19,8 @@ aiormq==6.8.0 # via aio-pika aiosignal==1.3.1 # via aiohttp +annotated-types==0.7.0 + # via pydantic anyio==4.3.0 # via # fast-depends @@ -31,10 +33,6 @@ arrow==1.3.0 # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -async-timeout==4.0.3 - # via - # aiohttp - # redis attrs==23.2.0 # via # aiohttp @@ -69,18 +67,10 @@ dnspython==2.6.1 # via email-validator email-validator==2.1.1 # via pydantic -exceptiongroup==1.2.0 - # via anyio fast-depends==2.4.2 # via 
faststream -fastapi==0.99.1 +fastapi==0.115.0 # via - # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in # prometheus-fastapi-instrumentator @@ -149,12 +139,11 @@ prometheus-fastapi-instrumentator==6.1.0 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in pycparser==2.21 # via cffi -pydantic==1.10.14 +pydantic==2.9.2 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt @@ -165,6 +154,20 @@ pydantic==1.10.14 # -r requirements/../../../packages/settings-library/requirements/_base.in # fast-depends # fastapi + # pydantic-extra-types + # pydantic-settings +pydantic-core==2.23.4 + # via pydantic +pydantic-extra-types==2.9.0 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in +pydantic-settings==2.5.2 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in pygments==2.17.2 # via rich pyinstrument==4.6.2 @@ -172,7 +175,9 @@ pyinstrument==4.6.2 python-dateutil==2.9.0.post0 # via arrow python-dotenv==1.0.1 - # via uvicorn + # via + # pydantic-settings + # uvicorn pyyaml==6.0.1 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -214,7 +219,7 @@ sniffio==1.3.1 # via # anyio # httpx -starlette==0.27.0 +starlette==0.38.6 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -241,12 +246,11 @@ typing-extensions==4.10.0 # via # aiodebug # aiodocker - # anyio # fastapi # faststream 
# pydantic + # pydantic-core # typer - # uvicorn uvicorn==0.29.0 # via # -r requirements/../../../packages/service-library/requirements/_fastapi.in diff --git a/services/invitations/requirements/_test.txt b/services/invitations/requirements/_test.txt index 4571dba8ffa..7b029ccb0de 100644 --- a/services/invitations/requirements/_test.txt +++ b/services/invitations/requirements/_test.txt @@ -16,12 +16,6 @@ coverage==7.6.1 # via # -r requirements/_test.in # pytest-cov -exceptiongroup==1.2.0 - # via - # -c requirements/_base.txt - # anyio - # hypothesis - # pytest faker==27.0.0 # via -r requirements/_test.in h11==0.14.0 @@ -90,11 +84,3 @@ sortedcontainers==2.4.0 # via hypothesis termcolor==2.4.0 # via pytest-sugar -tomli==2.0.1 - # via - # coverage - # pytest -typing-extensions==4.10.0 - # via - # -c requirements/_base.txt - # anyio diff --git a/services/invitations/requirements/_tools.txt b/services/invitations/requirements/_tools.txt index b17c8a2c2d8..f3932792cef 100644 --- a/services/invitations/requirements/_tools.txt +++ b/services/invitations/requirements/_tools.txt @@ -70,22 +70,11 @@ ruff==0.6.1 # via -r requirements/../../../requirements/devenv.txt setuptools==73.0.1 # via pip-tools -tomli==2.0.1 - # via - # -c requirements/_test.txt - # black - # build - # mypy - # pip-tools - # pylint tomlkit==0.13.2 # via pylint typing-extensions==4.10.0 # via # -c requirements/_base.txt - # -c requirements/_test.txt - # astroid - # black # mypy virtualenv==20.26.3 # via pre-commit From 82c7812e91328213347d11eb90e4876832a902e8 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 27 Sep 2024 21:19:36 +0200 Subject: [PATCH 128/280] run bump-pydantic --- .../core/settings.py | 18 +++++--- .../services/invitations.py | 41 ++++++------------- 2 files changed, 26 insertions(+), 33 deletions(-) diff --git a/services/invitations/src/simcore_service_invitations/core/settings.py b/services/invitations/src/simcore_service_invitations/core/settings.py index 6af076f4229..29d286f1020 100644 --- a/services/invitations/src/simcore_service_invitations/core/settings.py +++ b/services/invitations/src/simcore_service_invitations/core/settings.py @@ -1,7 +1,14 @@ from functools import cached_property from models_library.products import ProductName -from pydantic import Field, HttpUrl, PositiveInt, SecretStr, validator +from pydantic import ( + AliasChoices, + Field, + HttpUrl, + PositiveInt, + SecretStr, + field_validator, +) from settings_library.base import BaseCustomSettings from settings_library.basic_types import BuildTargetEnum, LogLevel, VersionTag from settings_library.utils_logging import MixinLoggingSettings @@ -38,14 +45,15 @@ class _BaseApplicationSettings(BaseCustomSettings, MixinLoggingSettings): # RUNTIME ----------------------------------------------------------- INVITATIONS_LOGLEVEL: LogLevel = Field( - default=LogLevel.INFO, env=["INVITATIONS_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"] + default=LogLevel.INFO, + validation_alias=AliasChoices("INVITATIONS_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"), ) INVITATIONS_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field( default=False, - env=[ + validation_alias=AliasChoices( "INVITATIONS_LOG_FORMAT_LOCAL_DEV_ENABLED", "LOG_FORMAT_LOCAL_DEV_ENABLED", - ], + ), description="Enables local development log format. 
WARNING: make sure it is disabled if you want to have structured logs!", ) @@ -53,7 +61,7 @@ class _BaseApplicationSettings(BaseCustomSettings, MixinLoggingSettings): def LOG_LEVEL(self): return self.INVITATIONS_LOGLEVEL - @validator("INVITATIONS_LOGLEVEL") + @field_validator("INVITATIONS_LOGLEVEL") @classmethod def valid_log_level(cls, value: str) -> str: return cls.validate_log_level(value) diff --git a/services/invitations/src/simcore_service_invitations/services/invitations.py b/services/invitations/src/simcore_service_invitations/services/invitations.py index d5bed8662cf..582014925de 100644 --- a/services/invitations/src/simcore_service_invitations/services/invitations.py +++ b/services/invitations/src/simcore_service_invitations/services/invitations.py @@ -1,18 +1,21 @@ import base64 import binascii import logging -from typing import Any, ClassVar, cast from urllib import parse from cryptography.fernet import Fernet, InvalidToken from models_library.invitations import InvitationContent, InvitationInputs from models_library.products import ProductName -from pydantic import HttpUrl, ValidationError, parse_obj_as +from pydantic import ConfigDict, HttpUrl, TypeAdapter, ValidationError from starlette.datastructures import URL _logger = logging.getLogger(__name__) +def _to_initial(v: str): + return v[0] + + class InvalidInvitationCodeError(Exception): ... @@ -25,7 +28,7 @@ def serialize(cls, model_obj: InvitationContent) -> str: """Exports to json using *short* aliases and values in order to produce shorter codes""" model_w_short_aliases_json: str = cls.construct( **model_obj.dict(exclude_unset=True) - ).json(exclude_unset=True, by_alias=True) + ).model_dump_json(exclude_unset=True, by_alias=True) # NOTE: json arguments try to minimize the amount of data # serialized. The CONS is that it relies on models in the code # that might change over time. 
This might lead to some datasets in codes @@ -40,31 +43,13 @@ def deserialize(cls, raw_json: str) -> InvitationContent: **model_w_short_aliases.dict(exclude_unset=True) ) - class Config: - allow_population_by_field_name = True # NOTE: can parse using field names - allow_mutation = False - anystr_strip_whitespace = True + model_config = ConfigDict( # NOTE: Can export with alias: short aliases to minimize the size of serialization artifact - fields: ClassVar[dict[str, Any]] = { - "issuer": { - "alias": "i", - }, - "guest": { - "alias": "g", - }, - "trial_account_days": { - "alias": "t", - }, - "extra_credits_in_usd": { - "alias": "e", - }, - "product": { - "alias": "p", - }, - "created": { - "alias": "c", - }, - } + alias_generator=_to_initial, + populate_by_name=True, # NOTE: can parse using field names + frozen=True, + str_strip_whitespace=True, + ) # @@ -81,7 +66,7 @@ def _build_link( # Adds query to fragment base_url = f"{base_url.rstrip('/')}/" url = URL(base_url).replace(fragment=f"{r}") - return cast(HttpUrl, parse_obj_as(HttpUrl, f"{url}")) + return TypeAdapter(HttpUrl).validate_python(f"{url}") def _fernet_encrypt_as_urlsafe_code( From b222869e4774e9b4ec892ce557bf3b7f7bfcc89e Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 27 Sep 2024 21:25:59 +0200 Subject: [PATCH 129/280] continue upgrading --- .../src/simcore_service_invitations/cli.py | 16 ++++++++-------- services/invitations/tests/unit/api/conftest.py | 2 +- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/services/invitations/src/simcore_service_invitations/cli.py b/services/invitations/src/simcore_service_invitations/cli.py index dffb1dca32f..cfb7cf2717a 100644 --- a/services/invitations/src/simcore_service_invitations/cli.py +++ b/services/invitations/src/simcore_service_invitations/cli.py @@ -5,7 +5,7 @@ from cryptography.fernet import Fernet from models_library.emails import LowerCaseEmailStr from models_library.invitations import InvitationContent, InvitationInputs -from pydantic import EmailStr, HttpUrl, ValidationError, parse_obj_as +from pydantic import EmailStr, HttpUrl, TypeAdapter, ValidationError from rich.console import Console from servicelib.utils_secrets import generate_password from settings_library.utils_cli import ( @@ -96,19 +96,19 @@ def invite( ctx: typer.Context, email: str = typer.Argument( ..., - callback=lambda v: parse_obj_as(LowerCaseEmailStr, v), + callback=lambda v: TypeAdapter(LowerCaseEmailStr).validate_python(v), help="Custom invitation for a given guest", ), issuer: str = typer.Option( - ..., help=InvitationInputs.__fields__["issuer"].field_info.description + ..., help=InvitationInputs.model_fields["issuer"].description ), trial_account_days: int = typer.Option( None, - help=InvitationInputs.__fields__["trial_account_days"].field_info.description, + help=InvitationInputs.model_fields["trial_account_days"].description, ), product: str = typer.Option( None, - help=InvitationInputs.__fields__["product"].field_info.description, + help=InvitationInputs.model_fields["product"].description, ), ): """Creates an invitation link for user with 'email' and issued by 'issuer'""" @@ -117,7 +117,7 @@ def invite( invitation_data = InvitationInputs( issuer=issuer, - guest=parse_obj_as(EmailStr, email), + guest=TypeAdapter(EmailStr).validate_python(email), trial_account_days=trial_account_days, extra_credits_in_usd=None, product=product, @@ -142,14 +142,14 @@ def extract(ctx: typer.Context, invitation_url: str): try: invitation: InvitationContent = extract_invitation_content( 
invitation_code=extract_invitation_code_from_query( - parse_obj_as(HttpUrl, invitation_url) + TypeAdapter(HttpUrl).validate_python(invitation_url) ), secret_key=settings.INVITATIONS_SECRET_KEY.get_secret_value().encode(), default_product=settings.INVITATIONS_DEFAULT_PRODUCT, ) assert invitation.product is not None # nosec - print(invitation.json(indent=1)) # noqa: T201 + print(invitation.model_dump_json(indent=1)) # noqa: T201 except (InvalidInvitationCodeError, ValidationError): _err_console.print("[bold red]Invalid code[/bold red]") diff --git a/services/invitations/tests/unit/api/conftest.py b/services/invitations/tests/unit/api/conftest.py index f4151fcc519..c558ac496ad 100644 --- a/services/invitations/tests/unit/api/conftest.py +++ b/services/invitations/tests/unit/api/conftest.py @@ -18,7 +18,7 @@ def client(app_environment: EnvVarsDict) -> Iterator[TestClient]: print(f"app_environment={json.dumps(app_environment)}") app = create_app() - print("settings:\n", app.state.settings.json(indent=1)) + print("settings:\n", app.state.settings.model_dump_json(indent=1)) with TestClient(app, base_url="http://testserver.test") as client: yield client From 5571be2a8cc482be6d0abc08b3468e3f609050e6 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 27 Sep 2024 21:29:24 +0200 Subject: [PATCH 130/280] upgrade requirements --- services/payments/requirements/_base.txt | 213 +++++++++++++++++++--- services/payments/requirements/_test.txt | 19 -- services/payments/requirements/_tools.txt | 10 - 3 files changed, 191 insertions(+), 51 deletions(-) diff --git a/services/payments/requirements/_base.txt b/services/payments/requirements/_base.txt index e14e1e9e171..cde8bc6b133 100644 --- a/services/payments/requirements/_base.txt +++ b/services/payments/requirements/_base.txt @@ -1,17 +1,33 @@ aio-pika==9.4.2 + # via -r requirements/../../../packages/service-library/requirements/_base.in aiodebug==2.3.0 + # via -r requirements/../../../packages/service-library/requirements/_base.in aiodocker==0.22.2 + # via -r requirements/../../../packages/service-library/requirements/_base.in aiofiles==24.1.0 + # via -r requirements/../../../packages/service-library/requirements/_base.in aiohappyeyeballs==2.3.4 # via aiohttp aiohttp==3.10.0 - # via aiodocker + # via + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # aiodocker aiormq==6.8.0 # via aio-pika aiosignal==1.3.1 # via aiohttp aiosmtplib==3.0.2 + # via -r requirements/_base.in alembic==1.13.2 + # via -r requirements/../../../packages/postgres-database/requirements/_base.in +annotated-types==0.7.0 + # via pydantic anyio==4.4.0 # via # fast-depends @@ -20,11 +36,12 @@ anyio==4.4.0 # starlette # watchfiles arrow==1.3.0 -async-timeout==4.0.3 # via - # aiohttp - # asyncpg - # redis + # -r 
requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/_base.in +async-timeout==4.0.3 + # via asyncpg asyncpg==0.29.0 # via sqlalchemy attrs==23.2.0 @@ -36,6 +53,13 @@ bidict==0.23.1 # via python-socketio certifi==2024.7.4 # via + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt # httpcore # httpx cffi==1.16.0 @@ -45,19 +69,30 @@ click==8.1.7 # typer # uvicorn cryptography==43.0.0 + # via + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/_base.in dnspython==2.6.1 # via email-validator ecdsa==0.19.0 # via python-jose email-validator==2.2.0 # via pydantic -exceptiongroup==1.2.2 - # via anyio fast-depends==2.4.7 # via faststream -fastapi==0.99.1 - # via prometheus-fastapi-instrumentator +fastapi==0.115.0 + # via + # -r requirements/../../../packages/service-library/requirements/_fastapi.in + # -r requirements/_base.in + # prometheus-fastapi-instrumentator faststream==0.5.15 + # via -r requirements/../../../packages/service-library/requirements/_base.in frozenlist==1.4.1 # via # aiohttp @@ -74,6 +109,16 @@ httpcore==1.0.5 httptools==0.6.1 # via uvicorn httpx==0.27.0 + # via + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/service-library/requirements/_fastapi.in + # -r requirements/_base.in idna==3.7 # via # anyio @@ -81,11 +126,31 @@ idna==3.7 # httpx # yarl jinja2==3.1.4 + # via + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/_base.in jsonschema==4.23.0 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in jsonschema-specifications==2023.7.1 # via jsonschema mako==1.3.5 - # via alembic + # via + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # alembic markdown-it-py==3.0.0 # via rich markupsafe==2.1.5 @@ -99,12 +164,26 @@ multidict==6.0.5 # aiohttp # yarl orjson==3.10.6 + # via + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in packaging==24.1 + # via -r requirements/_base.in pamqp==3.3.0 # via aiormq prometheus-client==0.20.0 - # via prometheus-fastapi-instrumentator + # via + # -r requirements/../../../packages/service-library/requirements/_fastapi.in + # prometheus-fastapi-instrumentator 
prometheus-fastapi-instrumentator==6.1.0 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in psycopg2-binary==2.9.9 # via sqlalchemy pyasn1==0.6.0 @@ -113,37 +192,100 @@ pyasn1==0.6.0 # rsa pycparser==2.22 # via cffi -pydantic==1.10.17 +pydantic==2.9.2 # via + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/postgres-database/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in # fast-depends # fastapi + # pydantic-extra-types + # pydantic-settings +pydantic-core==2.23.4 + # via pydantic +pydantic-extra-types==2.9.0 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in +pydantic-settings==2.5.2 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in pygments==2.18.0 # via rich pyinstrument==4.6.2 + # via -r requirements/../../../packages/service-library/requirements/_base.in python-dateutil==2.9.0.post0 # via arrow python-dotenv==1.0.1 - # via uvicorn + # via + # pydantic-settings + # uvicorn python-engineio==4.9.1 # via python-socketio python-jose==3.3.0 + # via -r requirements/_base.in python-multipart==0.0.9 + # via -r requirements/_base.in python-socketio==5.11.3 + # via -r requirements/_base.in pyyaml==6.0.1 - # via uvicorn + # via + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/service-library/requirements/_base.in + # uvicorn redis==5.0.8 + # via + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/service-library/requirements/_base.in referencing==0.29.3 # via + # -c requirements/../../../packages/service-library/requirements/./constraints.txt # jsonschema # jsonschema-specifications rich==13.7.1 - # via typer + # via + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in + # typer rpds-py==0.19.1 # via # jsonschema # referencing rsa==4.9 - # via python-jose + # via + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # python-jose shellingham==1.5.4 # via typer simple-websocket==1.0.0 @@ -157,27 +299,53 @@ sniffio==1.3.1 # anyio # httpx sqlalchemy==1.4.53 - # via alembic -starlette==0.27.0 - # via fastapi + # via + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/postgres-database/requirements/_base.in + # alembic +starlette==0.38.6 + # via + # -c 
requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # fastapi tenacity==9.0.0 + # via -r requirements/../../../packages/service-library/requirements/_base.in toolz==0.12.1 + # via -r requirements/../../../packages/service-library/requirements/_base.in tqdm==4.66.4 + # via -r requirements/../../../packages/service-library/requirements/_base.in typer==0.12.3 - # via faststream + # via + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/_base.in + # faststream types-python-dateutil==2.9.0.20240316 # via arrow typing-extensions==4.12.2 # via # aiodebug # alembic - # anyio # fastapi # faststream # pydantic + # pydantic-core # typer - # uvicorn uvicorn==0.30.4 + # via + # -r requirements/../../../packages/service-library/requirements/_fastapi.in + # -r requirements/_base.in uvloop==0.19.0 # via uvicorn watchfiles==0.22.0 @@ -188,6 +356,7 @@ wsproto==1.2.0 # via simple-websocket yarl==1.9.4 # via + # -r requirements/../../../packages/postgres-database/requirements/_base.in # aio-pika # aiohttp # aiormq diff --git a/services/payments/requirements/_test.txt b/services/payments/requirements/_test.txt index ad73ea53868..4417a86f228 100644 --- a/services/payments/requirements/_test.txt +++ b/services/payments/requirements/_test.txt @@ -17,10 +17,6 @@ anyio==4.4.0 # httpx asgi-lifespan==2.1.0 # via -r requirements/_test.in -async-timeout==4.0.3 - # via - # -c requirements/_base.txt - # aiohttp attrs==23.2.0 # via # -c requirements/_base.txt @@ -44,11 +40,6 @@ coverage==7.6.1 # pytest-cov docker==7.1.0 # via -r requirements/_test.in -exceptiongroup==1.2.2 - # via - # -c requirements/_base.txt - # anyio - # pytest faker==27.0.0 # via -r requirements/_test.in frozenlist==1.4.1 @@ -56,10 +47,6 @@ frozenlist==1.4.1 # -c requirements/_base.txt # aiohttp # aiosignal -greenlet==3.0.3 - # via - # -c requirements/_base.txt - # sqlalchemy h11==0.14.0 # via # -c requirements/_base.txt @@ -170,11 +157,6 @@ sqlalchemy2-stubs==0.0.2a38 # via sqlalchemy termcolor==2.4.0 # via pytest-sugar -tomli==2.0.1 - # via - # coverage - # mypy - # pytest types-aiofiles==24.1.0.20240626 # via -r requirements/_test.in types-pyasn1==0.6.0.20240402 @@ -186,7 +168,6 @@ types-pyyaml==6.0.12.20240808 typing-extensions==4.12.2 # via # -c requirements/_base.txt - # anyio # mypy # sqlalchemy2-stubs urllib3==2.2.2 diff --git a/services/payments/requirements/_tools.txt b/services/payments/requirements/_tools.txt index cee6887ecd0..fb799ce9990 100644 --- a/services/payments/requirements/_tools.txt +++ b/services/payments/requirements/_tools.txt @@ -72,22 +72,12 @@ ruff==0.6.1 # via -r requirements/../../../requirements/devenv.txt setuptools==73.0.1 # via pip-tools 
-tomli==2.0.1 - # via - # -c requirements/_test.txt - # black - # build - # mypy - # pip-tools - # pylint tomlkit==0.13.2 # via pylint typing-extensions==4.12.2 # via # -c requirements/_base.txt # -c requirements/_test.txt - # astroid - # black # mypy virtualenv==20.26.3 # via pre-commit From 610a8efd77f3f4571fa40f604b6ee30a4d772169 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 27 Sep 2024 21:39:53 +0200 Subject: [PATCH 131/280] continue upgrading --- .../simcore_service_payments/core/settings.py | 31 +++++++++------ .../db/auto_recharge_repo.py | 6 +-- .../src/simcore_service_payments/models/db.py | 39 +++++++++---------- .../models/payments_gateway.py | 10 ++--- .../models/schemas/acknowledgements.py | 18 ++++----- .../models/schemas/meta.py | 10 ++--- 6 files changed, 57 insertions(+), 57 deletions(-) diff --git a/services/payments/src/simcore_service_payments/core/settings.py b/services/payments/src/simcore_service_payments/core/settings.py index 3e8b2d44d76..78759c3cc7f 100644 --- a/services/payments/src/simcore_service_payments/core/settings.py +++ b/services/payments/src/simcore_service_payments/core/settings.py @@ -2,13 +2,14 @@ from models_library.basic_types import NonNegativeDecimal from pydantic import ( + AliasChoices, EmailStr, Field, HttpUrl, PositiveFloat, SecretStr, - parse_obj_as, - validator, + TypeAdapter, + field_validator, ) from settings_library.application import BaseApplicationSettings from settings_library.basic_types import LogLevel, VersionTag @@ -27,19 +28,24 @@ class _BaseApplicationSettings(BaseApplicationSettings, MixinLoggingSettings): # CODE STATICS --------------------------------------------------------- API_VERSION: str = API_VERSION APP_NAME: str = PROJECT_NAME - API_VTAG: VersionTag = parse_obj_as(VersionTag, API_VTAG) + API_VTAG: VersionTag = TypeAdapter(VersionTag).validate_python(API_VTAG) # RUNTIME ----------------------------------------------------------- PAYMENTS_LOGLEVEL: LogLevel = Field( - default=LogLevel.INFO, env=["PAYMENTS_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"] + default=LogLevel.INFO, + validation_alias=AliasChoices( + "PAYMENTS_LOGLEVEL", + "LOG_LEVEL", + "LOGLEVEL", + ), ) PAYMENTS_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field( default=False, - env=[ + validation_alias=AliasChoices( "PAYMENTS_LOG_FORMAT_LOCAL_DEV_ENABLED", "LOG_FORMAT_LOCAL_DEV_ENABLED", - ], + ), description="Enables local development log format. 
WARNING: make sure it is disabled if you want to have structured logs!", ) @@ -47,7 +53,7 @@ class _BaseApplicationSettings(BaseApplicationSettings, MixinLoggingSettings): def LOG_LEVEL(self): # noqa: N802 return self.PAYMENTS_LOGLEVEL - @validator("PAYMENTS_LOGLEVEL") + @field_validator("PAYMENTS_LOGLEVEL") @classmethod def valid_log_level(cls, value: str) -> str: return cls.validate_log_level(value) @@ -110,11 +116,13 @@ class ApplicationSettings(_BaseApplicationSettings): ) PAYMENTS_RABBITMQ: RabbitSettings = Field( - auto_default_from_env=True, description="settings for service/rabbitmq" + description="settings for service/rabbitmq", + json_schema_extra={"auto_default_from_env": True}, ) PAYMENTS_POSTGRES: PostgresSettings = Field( - auto_default_from_env=True, description="settings for postgres service" + description="settings for postgres service", + json_schema_extra={"auto_default_from_env": True}, ) PAYMENTS_STRIPE_URL: HttpUrl = Field( @@ -129,12 +137,13 @@ class ApplicationSettings(_BaseApplicationSettings): ) PAYMENTS_RESOURCE_USAGE_TRACKER: ResourceUsageTrackerSettings = Field( - auto_default_from_env=True, description="settings for RUT service" + description="settings for RUT service", + json_schema_extra={"auto_default_from_env": True}, ) PAYMENTS_PROMETHEUS_INSTRUMENTATION_ENABLED: bool = True PAYMENTS_EMAIL: SMTPSettings | None = Field( - auto_default_from_env=True, description="optional email (see notifier_email service)", + json_schema_extra={"auto_default_from_env": True}, ) diff --git a/services/payments/src/simcore_service_payments/db/auto_recharge_repo.py b/services/payments/src/simcore_service_payments/db/auto_recharge_repo.py index 4e7b25d228e..539e8d7d693 100644 --- a/services/payments/src/simcore_service_payments/db/auto_recharge_repo.py +++ b/services/payments/src/simcore_service_payments/db/auto_recharge_repo.py @@ -5,7 +5,7 @@ from models_library.basic_types import NonNegativeDecimal from models_library.users import UserID from models_library.wallets import WalletID -from pydantic import BaseModel, PositiveInt +from pydantic import BaseModel, ConfigDict, PositiveInt from simcore_postgres_database.utils_payments_autorecharge import AutoRechargeStmts from .base import BaseRepository @@ -19,9 +19,7 @@ class PaymentsAutorechargeDB(BaseModel): primary_payment_method_id: PaymentMethodID top_up_amount_in_usd: NonNegativeDecimal monthly_limit_in_usd: NonNegativeDecimal | None - - class Config: - orm_mode = True + model_config = ConfigDict(from_attributes=True) class AutoRechargeRepo(BaseRepository): diff --git a/services/payments/src/simcore_service_payments/models/db.py b/services/payments/src/simcore_service_payments/models/db.py index 8d69b8de70d..f14ce83a234 100644 --- a/services/payments/src/simcore_service_payments/models/db.py +++ b/services/payments/src/simcore_service_payments/models/db.py @@ -1,6 +1,5 @@ import datetime from decimal import Decimal -from typing import Any, ClassVar from models_library.api_schemas_webserver.wallets import PaymentID, PaymentMethodID from models_library.emails import LowerCaseEmailStr @@ -8,7 +7,7 @@ from models_library.products import ProductName from models_library.users import UserID from models_library.wallets import WalletID -from pydantic import BaseModel, HttpUrl +from pydantic import BaseModel, ConfigDict, HttpUrl from simcore_postgres_database.models.payments_methods import InitPromptAckFlowState from simcore_postgres_database.models.payments_transactions import ( PaymentTransactionState, @@ -39,18 +38,17 @@ class 
PaymentsTransactionsDB(BaseModel): user_id: UserID user_email: LowerCaseEmailStr wallet_id: WalletID - comment: str | None - invoice_url: HttpUrl | None - stripe_invoice_id: StripeInvoiceID | None - invoice_pdf_url: HttpUrl | None + comment: str | None = None + invoice_url: HttpUrl | None = None + stripe_invoice_id: StripeInvoiceID | None = None + invoice_pdf_url: HttpUrl | None = None initiated_at: datetime.datetime - completed_at: datetime.datetime | None + completed_at: datetime.datetime | None = None state: PaymentTransactionState - state_message: str | None - - class Config: - orm_mode = True - schema_extra: ClassVar[dict[str, Any]] = { + state_message: str | None = None + model_config = ConfigDict( + from_attributes=True, + json_schema_extra={ "examples": [ _EXAMPLE_AFTER_INIT, # successful completion @@ -64,7 +62,8 @@ class Config: "state_message": "Payment completed successfully", }, ] - } + }, + ) _EXAMPLE_AFTER_INIT_PAYMENT_METHOD = { @@ -83,13 +82,12 @@ class PaymentsMethodsDB(BaseModel): wallet_id: WalletID # State in Flow initiated_at: datetime.datetime - completed_at: datetime.datetime | None + completed_at: datetime.datetime | None = None state: InitPromptAckFlowState - state_message: str | None - - class Config: - orm_mode = True - schema_extra: ClassVar[dict[str, Any]] = { + state_message: str | None = None + model_config = ConfigDict( + from_attributes=True, + json_schema_extra={ "examples": [ _EXAMPLE_AFTER_INIT_PAYMENT_METHOD, # successful completion @@ -100,4 +98,5 @@ class Config: "state_message": "Payment method completed successfully", }, ] - } + }, + ) diff --git a/services/payments/src/simcore_service_payments/models/payments_gateway.py b/services/payments/src/simcore_service_payments/models/payments_gateway.py index e0d7481df58..ba3a9f6b087 100644 --- a/services/payments/src/simcore_service_payments/models/payments_gateway.py +++ b/services/payments/src/simcore_service_payments/models/payments_gateway.py @@ -7,7 +7,7 @@ from models_library.basic_types import AmountDecimal, IDStr from models_library.payments import UserInvoiceAddress from models_library.products import StripePriceID, StripeTaxRateID -from pydantic import BaseModel, EmailStr, Extra, Field +from pydantic import BaseModel, ConfigDict, EmailStr, Field COUNTRIES_WITH_VAT = ["CH", "LI"] @@ -39,9 +39,7 @@ class InitPayment(BaseModel): stripe_price_id: StripePriceID stripe_tax_rate_id: StripeTaxRateID stripe_tax_exempt_value: StripeTaxExempt - - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") class PaymentInitiated(BaseModel): @@ -58,9 +56,7 @@ class InitPaymentMethod(BaseModel): user_name: IDStr user_email: EmailStr wallet_name: IDStr - - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") class PaymentMethodInitiated(BaseModel): diff --git a/services/payments/src/simcore_service_payments/models/schemas/acknowledgements.py b/services/payments/src/simcore_service_payments/models/schemas/acknowledgements.py index 5b73282cc3c..49f2687b420 100644 --- a/services/payments/src/simcore_service_payments/models/schemas/acknowledgements.py +++ b/services/payments/src/simcore_service_payments/models/schemas/acknowledgements.py @@ -1,9 +1,9 @@ # mypy: disable-error-code=truthy-function -from typing import Any, ClassVar +from typing import Any from models_library.api_schemas_webserver.wallets import PaymentID, PaymentMethodID from models_library.basic_types import IDStr -from pydantic import BaseModel, Field, HttpUrl, validator +from pydantic import 
BaseModel, ConfigDict, Field, HttpUrl, field_validator class _BaseAck(BaseModel): @@ -87,14 +87,14 @@ class AckPayment(_BaseAckPayment): description="Gets the payment-method if user opted to save it during payment." "If used did not opt to save of payment-method was already saved, then it defaults to None", ) - - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "example": _EXAMPLES[1].copy(), # shown in openapi.json "examples": _EXAMPLES, } + ) - @validator("invoice_url") + @field_validator("invoice_url") @classmethod def success_requires_invoice(cls, v, values): success = values.get("success") @@ -112,14 +112,14 @@ class AckPaymentWithPaymentMethod(_BaseAckPayment): payment_id: PaymentID = Field( default=None, description="Payment ID from the gateway" ) - - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "example": { **_ONE_TIME_SUCCESS, "payment_id": "D19EE68B-B007-4B61-A8BC-32B7115FB244", }, # shown in openapi.json } + ) assert PaymentID # nosec diff --git a/services/payments/src/simcore_service_payments/models/schemas/meta.py b/services/payments/src/simcore_service_payments/models/schemas/meta.py index 06352b54ba1..cf5e7c649a2 100644 --- a/services/payments/src/simcore_service_payments/models/schemas/meta.py +++ b/services/payments/src/simcore_service_payments/models/schemas/meta.py @@ -1,17 +1,15 @@ -from typing import Any, ClassVar - from models_library.api_schemas__common.meta import BaseMeta -from pydantic import HttpUrl +from pydantic import ConfigDict, HttpUrl class Meta(BaseMeta): docs_url: HttpUrl - - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "example": { "name": "simcore_service_payments", "version": "2.4.45", "docs_url": "https://foo.io/doc", } } + ) From 65b00e95cbfc5faaa15f570a7ebe4f93b66264c2 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 27 Sep 2024 21:53:02 +0200 Subject: [PATCH 132/280] fix issues --- .../services/auto_recharge_process_message.py | 4 ++-- .../simcore_service_payments/services/payments_gateway.py | 8 ++++---- services/payments/tests/conftest.py | 4 ++-- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/services/payments/src/simcore_service_payments/services/auto_recharge_process_message.py b/services/payments/src/simcore_service_payments/services/auto_recharge_process_message.py index 9507b32bf0a..b95dd071ee9 100644 --- a/services/payments/src/simcore_service_payments/services/auto_recharge_process_message.py +++ b/services/payments/src/simcore_service_payments/services/auto_recharge_process_message.py @@ -14,7 +14,7 @@ from models_library.rabbitmq_basic_types import RPCMethodName from models_library.rabbitmq_messages import WalletCreditsMessage from models_library.wallets import WalletID -from pydantic import parse_obj_as, parse_raw_as +from pydantic import TypeAdapter, parse_obj_as from simcore_service_payments.db.auto_recharge_repo import AutoRechargeRepo from simcore_service_payments.db.payments_methods_repo import PaymentsMethodsRepo from simcore_service_payments.db.payments_transactions_repo import ( @@ -36,7 +36,7 @@ async def process_message(app: FastAPI, data: bytes) -> bool: - rabbit_message = parse_raw_as(WalletCreditsMessage, data) + rabbit_message = TypeAdapter(WalletCreditsMessage).validate_json(data) _logger.debug("Process msg: %s", rabbit_message) settings: ApplicationSettings = app.state.settings diff --git 
a/services/payments/src/simcore_service_payments/services/payments_gateway.py b/services/payments/src/simcore_service_payments/services/payments_gateway.py index 0b1097492c6..1e5748103ee 100644 --- a/services/payments/src/simcore_service_payments/services/payments_gateway.py +++ b/services/payments/src/simcore_service_payments/services/payments_gateway.py @@ -16,8 +16,8 @@ from fastapi.encoders import jsonable_encoder from httpx import URL, HTTPStatusError from models_library.api_schemas_webserver.wallets import PaymentID, PaymentMethodID -from pydantic import ValidationError, parse_raw_as -from pydantic.errors import PydanticErrorMixin +from models_library.error_classes import OsparcErrorMixin +from pydantic import TypeAdapter, ValidationError from servicelib.fastapi.app_state import SingletonInAppStateMixin from servicelib.fastapi.http_client import ( AttachLifespanMixin, @@ -48,11 +48,11 @@ def _parse_raw_as_or_none(cls: type, text: str | None): if text: with suppress(ValidationError): - return parse_raw_as(cls, text) + return TypeAdapter(cls).validate_json(text) return None -class PaymentsGatewayError(PydanticErrorMixin, ValueError): +class PaymentsGatewayError(OsparcErrorMixin, ValueError): msg_template = "{operation_id} error {status_code}: {reason}" @classmethod diff --git a/services/payments/tests/conftest.py b/services/payments/tests/conftest.py index 042ac85f968..3691f3c43d2 100644 --- a/services/payments/tests/conftest.py +++ b/services/payments/tests/conftest.py @@ -10,7 +10,7 @@ import simcore_service_payments from faker import Faker from models_library.users import GroupID -from pydantic import parse_obj_as +from pydantic import TypeAdapter from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict from servicelib.utils_secrets import generate_token_secret_key @@ -89,4 +89,4 @@ def app_environment( @pytest.fixture def user_primary_group_id(faker: Faker) -> GroupID: - return parse_obj_as(GroupID, faker.pyint()) + return TypeAdapter(GroupID).validate_python(faker.pyint()) From 7eea28eebb480480e253ca19690d0d1c5d53a504 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 27 Sep 2024 21:57:44 +0200 Subject: [PATCH 133/280] upgrade requirements --- .../requirements/_base.txt | 57 ++++++++++--------- .../requirements/_test.txt | 31 ++++------ .../requirements/_tools.txt | 10 ---- 3 files changed, 41 insertions(+), 57 deletions(-) diff --git a/services/resource-usage-tracker/requirements/_base.txt b/services/resource-usage-tracker/requirements/_base.txt index 64c4ae399ad..8540bb3da25 100644 --- a/services/resource-usage-tracker/requirements/_base.txt +++ b/services/resource-usage-tracker/requirements/_base.txt @@ -48,6 +48,8 @@ aiosignal==1.3.1 # via aiohttp alembic==1.13.1 # via -r requirements/../../../packages/postgres-database/requirements/_base.in +annotated-types==0.7.0 + # via pydantic anyio==4.3.0 # via # fast-depends @@ -60,14 +62,12 @@ arrow==1.3.0 # -r requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/_base.in # -r 
requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in async-timeout==4.0.3 - # via - # aiohttp - # asyncpg - # redis + # via asyncpg asyncpg==0.29.0 # via sqlalchemy attrs==23.2.0 @@ -118,25 +118,10 @@ dnspython==2.6.1 # via email-validator email-validator==2.1.1 # via pydantic -exceptiongroup==1.2.0 - # via anyio fast-depends==2.4.2 # via faststream -fastapi==0.99.1 +fastapi==0.115.0 # via - # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in # prometheus-fastapi-instrumentator @@ -272,7 +257,7 @@ prometheus-fastapi-instrumentator==6.1.0 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in psycopg2-binary==2.9.9 # via sqlalchemy -pydantic==1.10.14 +pydantic==2.9.2 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -284,7 +269,6 @@ pydantic==1.10.14 # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -c 
requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt @@ -302,6 +286,26 @@ pydantic==1.10.14 # -r requirements/../../../packages/settings-library/requirements/_base.in # fast-depends # fastapi + # pydantic-extra-types + # pydantic-settings +pydantic-core==2.23.4 + # via pydantic +pydantic-extra-types==2.9.0 + # via + # -r requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in +pydantic-settings==2.5.2 + # via + # -r requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in pygments==2.17.2 # via rich pyinstrument==4.6.2 @@ -318,7 +322,9 @@ python-dateutil==2.9.0.post0 # matplotlib # pandas python-dotenv==1.0.1 - # via uvicorn + # via + # pydantic-settings + # uvicorn pytz==2024.1 # via # dateparser @@ -412,7 +418,7 @@ sqlalchemy==1.4.52 # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/postgres-database/requirements/_base.in # alembic -starlette==0.27.0 +starlette==0.38.6 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -465,16 +471,15 @@ typing-extensions==4.10.0 # aiodebug # aiodocker # alembic - # anyio # fastapi # faststream # pydantic + # pydantic-core # typer # types-aiobotocore # types-aiobotocore-ec2 # types-aiobotocore-s3 # types-aiobotocore-ssm - # uvicorn tzdata==2024.1 # via pandas tzlocal==5.2 diff --git a/services/resource-usage-tracker/requirements/_test.txt b/services/resource-usage-tracker/requirements/_test.txt index de56ac958c7..3ff748009f7 100644 --- a/services/resource-usage-tracker/requirements/_test.txt +++ b/services/resource-usage-tracker/requirements/_test.txt @@ -2,6 +2,10 @@ alembic==1.13.1 # via # -c requirements/_base.txt # -r requirements/_test.in +annotated-types==0.7.0 + # via + # -c requirements/_base.txt + # pydantic 
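(The pins introduced in these hunks mirror Pydantic v2's packaging split: the compiled validation core now comes from pydantic-core, BaseSettings has moved out of pydantic proper into the separate pydantic-settings distribution, and that package is also what now pulls in python-dotenv. annotated-types appears as a new transitive pin for the same reason, since v2 expresses field constraints through Annotated metadata. As a rough sketch of the settings style these services migrate to, with purely illustrative field and file names that are not taken from the repository:

from pydantic import Field
from pydantic_settings import BaseSettings, SettingsConfigDict


class ExampleSettings(BaseSettings):
    # hypothetical field: resolved from EXAMPLE_LOG_LEVEL in the environment or .env
    EXAMPLE_LOG_LEVEL: str = Field(default="INFO")

    # env_file support is what brings python-dotenv into the dependency tree
    model_config = SettingsConfigDict(env_file=".env")


settings = ExampleSettings()
)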
antlr4-python3-runtime==4.13.2 # via moto anyio==4.3.0 @@ -10,10 +14,6 @@ anyio==4.3.0 # httpx asgi-lifespan==2.1.0 # via -r requirements/_test.in -async-timeout==4.0.3 - # via - # -c requirements/_base.txt - # redis attrs==23.2.0 # via # -c requirements/_base.txt @@ -69,11 +69,6 @@ docker==7.1.0 # via # -r requirements/_test.in # moto -exceptiongroup==1.2.0 - # via - # -c requirements/_base.txt - # anyio - # pytest faker==27.0.0 # via -r requirements/_test.in fakeredis==2.23.5 @@ -86,10 +81,6 @@ flask-cors==4.0.1 # via moto graphql-core==3.2.3 # via moto -greenlet==3.0.3 - # via - # -c requirements/_base.txt - # sqlalchemy h11==0.14.0 # via # -c requirements/_base.txt @@ -191,11 +182,15 @@ py-partiql-parser==0.5.5 # via moto pycparser==2.22 # via cffi -pydantic==1.10.14 +pydantic==2.9.2 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # aws-sam-translator +pydantic-core==2.23.4 + # via + # -c requirements/_base.txt + # pydantic pyparsing==3.1.2 # via # -c requirements/_base.txt @@ -305,23 +300,17 @@ sympy==1.13.2 # via cfn-lint termcolor==2.4.0 # via pytest-sugar -tomli==2.0.1 - # via - # coverage - # mypy - # pytest types-requests==2.32.0.20240712 # via -r requirements/_test.in typing-extensions==4.10.0 # via # -c requirements/_base.txt # alembic - # anyio # aws-sam-translator # cfn-lint - # fakeredis # mypy # pydantic + # pydantic-core # sqlalchemy2-stubs urllib3==2.0.7 # via diff --git a/services/resource-usage-tracker/requirements/_tools.txt b/services/resource-usage-tracker/requirements/_tools.txt index b84cbbeb9fb..9ea4ddf842c 100644 --- a/services/resource-usage-tracker/requirements/_tools.txt +++ b/services/resource-usage-tracker/requirements/_tools.txt @@ -77,22 +77,12 @@ setuptools==73.0.1 # via # -c requirements/_test.txt # pip-tools -tomli==2.0.1 - # via - # -c requirements/_test.txt - # black - # build - # mypy - # pip-tools - # pylint tomlkit==0.13.2 # via pylint typing-extensions==4.10.0 # via # -c requirements/_base.txt # -c requirements/_test.txt - # astroid - # black # mypy virtualenv==20.26.3 # via pre-commit From 824e1470d079b098fb8b04614315bb78de12fb4b Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 27 Sep 2024 22:02:59 +0200 Subject: [PATCH 134/280] continue upgrading --- .../core/settings.py | 32 ++++++++++++------- .../resource_tracker_credit_transactions.py | 6 ++-- .../models/resource_tracker_pricing_plans.py | 14 +++----- .../resource_tracker_pricing_unit_costs.py | 6 ++-- .../models/resource_tracker_pricing_units.py | 8 ++--- .../models/resource_tracker_service_runs.py | 20 ++++-------- 6 files changed, 38 insertions(+), 48 deletions(-) diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/core/settings.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/core/settings.py index 7ac7599ed58..360a811dd86 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/core/settings.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/core/settings.py @@ -2,7 +2,7 @@ from functools import cached_property from models_library.basic_types import BootModeEnum -from pydantic import Field, PositiveInt, validator +from pydantic import AliasChoices, field_validator, Field, PositiveInt from settings_library.base import BaseCustomSettings from settings_library.basic_types import BuildTargetEnum, LogLevel, VersionTag from settings_library.postgres import PostgresSettings @@ -44,18 +44,24 @@ class 
_BaseApplicationSettings(BaseCustomSettings, MixinLoggingSettings): RESOURCE_USAGE_TRACKER_DEBUG: bool = Field( default=False, description="Debug mode", - env=["RESOURCE_USAGE_TRACKER_DEBUG", "DEBUG"], + validation_alias=AliasChoices( + "RESOURCE_USAGE_TRACKER_DEBUG", + "DEBUG", + ), ) RESOURCE_USAGE_TRACKER_LOGLEVEL: LogLevel = Field( default=LogLevel.INFO, - env=["RESOURCE_USAGE_TRACKER_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"], + validation_alias=AliasChoices( + "RESOURCE_USAGE_TRACKER_LOGLEVEL", + "LOG_LEVEL", + "LOGLEVEL"), ) RESOURCE_USAGE_TRACKER_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field( default=False, - env=[ + validation_alias=AliasChoices( "RESOURCE_USAGE_TRACKER_LOG_FORMAT_LOCAL_DEV_ENABLED", "LOG_FORMAT_LOCAL_DEV_ENABLED", - ], + ), description="Enables local development log format. WARNING: make sure it is disabled if you want to have structured logs!", ) @@ -63,7 +69,7 @@ class _BaseApplicationSettings(BaseCustomSettings, MixinLoggingSettings): def LOG_LEVEL(self) -> LogLevel: # noqa: N802 return self.RESOURCE_USAGE_TRACKER_LOGLEVEL - @validator("RESOURCE_USAGE_TRACKER_LOGLEVEL") + @field_validator("RESOURCE_USAGE_TRACKER_LOGLEVEL") @classmethod def valid_log_level(cls, value: str) -> str: return cls.validate_log_level(value) @@ -77,16 +83,18 @@ class MinimalApplicationSettings(_BaseApplicationSettings): """ RESOURCE_USAGE_TRACKER_PROMETHEUS: PrometheusSettings | None = Field( - auto_default_from_env=True + json_schema_extra={"auto_default_from_env":True} ) RESOURCE_USAGE_TRACKER_POSTGRES: PostgresSettings | None = Field( - auto_default_from_env=True + json_schema_extra={"auto_default_from_env":True}, ) - RESOURCE_USAGE_TRACKER_REDIS: RedisSettings = Field(auto_default_from_env=True) + RESOURCE_USAGE_TRACKER_REDIS: RedisSettings = Field( + json_schema_extra={"auto_default_from_env":True}, + ) RESOURCE_USAGE_TRACKER_RABBITMQ: RabbitSettings | None = Field( - auto_default_from_env=True + json_schema_extra={"auto_default_from_env":True}, ) @@ -109,4 +117,6 @@ class ApplicationSettings(MinimalApplicationSettings): description="Heartbeat couter limit when RUT considers service as unhealthy.", ) RESOURCE_USAGE_TRACKER_PROMETHEUS_INSTRUMENTATION_ENABLED: bool = True - RESOURCE_USAGE_TRACKER_S3: S3Settings | None = Field(auto_default_from_env=True) + RESOURCE_USAGE_TRACKER_S3: S3Settings | None = Field( + json_schema_extra={"auto_default_from_env":True}, + ) diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/resource_tracker_credit_transactions.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/resource_tracker_credit_transactions.py index a264f90d375..88c49c10713 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/resource_tracker_credit_transactions.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/resource_tracker_credit_transactions.py @@ -13,7 +13,7 @@ ) from models_library.users import UserID from models_library.wallets import WalletID -from pydantic import BaseModel +from pydantic import ConfigDict, BaseModel class CreditTransactionCreate(BaseModel): @@ -64,6 +64,4 @@ class CreditTransactionDB(BaseModel): created: datetime last_heartbeat_at: datetime modified: datetime - - class Config: - orm_mode = True + model_config = ConfigDict(from_attributes=True) diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/resource_tracker_pricing_plans.py 
b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/resource_tracker_pricing_plans.py index f946c92e5d9..1c17a433855 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/resource_tracker_pricing_plans.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/resource_tracker_pricing_plans.py @@ -2,7 +2,7 @@ from models_library.resource_tracker import PricingPlanClassification, PricingPlanId from models_library.services import ServiceKey, ServiceVersion -from pydantic import BaseModel +from pydantic import ConfigDict, BaseModel ## DB Models @@ -15,16 +15,12 @@ class PricingPlansDB(BaseModel): is_active: bool created: datetime pricing_plan_key: str - - class Config: - orm_mode = True + model_config = ConfigDict(from_attributes=True) class PricingPlansWithServiceDefaultPlanDB(PricingPlansDB): service_default_plan: bool - - class Config: - orm_mode = True + model_config = ConfigDict(from_attributes=True) class PricingPlanToServiceDB(BaseModel): @@ -32,6 +28,4 @@ class PricingPlanToServiceDB(BaseModel): service_key: ServiceKey service_version: ServiceVersion created: datetime - - class Config: - orm_mode = True + model_config = ConfigDict(from_attributes=True) diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/resource_tracker_pricing_unit_costs.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/resource_tracker_pricing_unit_costs.py index b5fa3daadf0..23fb94cf7cf 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/resource_tracker_pricing_unit_costs.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/resource_tracker_pricing_unit_costs.py @@ -6,7 +6,7 @@ PricingUnitCostId, PricingUnitId, ) -from pydantic import BaseModel +from pydantic import ConfigDict, BaseModel class PricingUnitCostsDB(BaseModel): @@ -21,6 +21,4 @@ class PricingUnitCostsDB(BaseModel): created: datetime comment: str | None modified: datetime - - class Config: - orm_mode = True + model_config = ConfigDict(from_attributes=True) diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/resource_tracker_pricing_units.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/resource_tracker_pricing_units.py index f0fed877d43..5a0a66dc542 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/resource_tracker_pricing_units.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/resource_tracker_pricing_units.py @@ -9,7 +9,7 @@ PricingUnitId, UnitExtraInfo, ) -from pydantic import BaseModel, validator +from pydantic import field_validator, ConfigDict, BaseModel class PricingUnitsDB(BaseModel): @@ -23,11 +23,9 @@ class PricingUnitsDB(BaseModel): modified: datetime current_cost_per_unit: Decimal current_cost_per_unit_id: PricingUnitCostId + model_config = ConfigDict(from_attributes=True) - class Config: - orm_mode = True - - @validator("specific_info", pre=True) + @field_validator("specific_info", mode="before") @classmethod def default_hardware_info_when_empty(cls, v) -> HardwareInfo | Any: if not v: diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/resource_tracker_service_runs.py 
b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/resource_tracker_service_runs.py index 45cddca3057..011da17901f 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/resource_tracker_service_runs.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/models/resource_tracker_service_runs.py @@ -16,7 +16,7 @@ from models_library.services import ServiceKey, ServiceVersion from models_library.users import UserID from models_library.wallets import WalletID -from pydantic import BaseModel, NonNegativeInt +from pydantic import ConfigDict, BaseModel, NonNegativeInt class ServiceRunCreate(BaseModel): @@ -93,25 +93,19 @@ class ServiceRunDB(BaseModel): last_heartbeat_at: datetime service_run_status_msg: str | None missed_heartbeat_counter: NonNegativeInt - - class Config: - orm_mode = True + model_config = ConfigDict(from_attributes=True) class ServiceRunWithCreditsDB(ServiceRunDB): - osparc_credits: Decimal | None + osparc_credits: Decimal | None = None transaction_status: CreditTransactionStatus | None - - class Config: - orm_mode = True + model_config = ConfigDict(from_attributes=True) class OsparcCreditsAggregatedByServiceKeyDB(BaseModel): osparc_credits: Decimal service_key: ServiceKey - - class Config: - orm_mode = True + model_config = ConfigDict(from_attributes=True) class ServiceRunForCheckDB(BaseModel): @@ -119,6 +113,4 @@ class ServiceRunForCheckDB(BaseModel): last_heartbeat_at: datetime missed_heartbeat_counter: NonNegativeInt modified: datetime - - class Config: - orm_mode = True + model_config = ConfigDict(from_attributes=True) From 90f045b90685e31a6476d4ce227be2958b079c96 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 27 Sep 2024 22:26:41 +0200 Subject: [PATCH 135/280] upgrade requirements --- services/efs-guardian/requirements/_base.txt | 53 ++++++++++--------- services/efs-guardian/requirements/_test.txt | 31 +++++------ services/efs-guardian/requirements/_tools.txt | 10 ---- 3 files changed, 42 insertions(+), 52 deletions(-) diff --git a/services/efs-guardian/requirements/_base.txt b/services/efs-guardian/requirements/_base.txt index 0559ab670cf..551a43f5c07 100644 --- a/services/efs-guardian/requirements/_base.txt +++ b/services/efs-guardian/requirements/_base.txt @@ -43,6 +43,8 @@ aiormq==6.8.0 # via aio-pika aiosignal==1.3.1 # via aiohttp +annotated-types==0.7.0 + # via pydantic anyio==4.4.0 # via # fast-depends @@ -54,13 +56,10 @@ arrow==1.3.0 # -r requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -async-timeout==4.0.3 - # via - # aiohttp - # redis attrs==23.2.0 # via # aiohttp @@ -99,24 +98,10 @@ dnspython==2.6.1 # via email-validator email-validator==2.1.1 # via pydantic -exceptiongroup==1.2.1 - # via anyio fast-depends==2.4.3 # via faststream -fastapi==0.99.1 +fastapi==0.115.0 # via - # -c 
requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in # prometheus-fastapi-instrumentator @@ -203,7 +188,7 @@ prometheus-client==0.20.0 # prometheus-fastapi-instrumentator prometheus-fastapi-instrumentator==6.1.0 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -pydantic==1.10.15 +pydantic==2.9.2 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -214,7 +199,6 @@ pydantic==1.10.15 # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt @@ -231,6 +215,26 @@ pydantic==1.10.15 # -r requirements/../../../packages/settings-library/requirements/_base.in # fast-depends # fastapi + # pydantic-extra-types + # pydantic-settings +pydantic-core==2.23.4 + # via pydantic +pydantic-extra-types==2.9.0 + # via + # -r requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/_base.in + # -r 
requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in +pydantic-settings==2.5.2 + # via + # -r requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in pygments==2.18.0 # via rich pyinstrument==4.6.2 @@ -241,6 +245,8 @@ python-dateutil==2.9.0.post0 # via # arrow # botocore +python-dotenv==1.0.1 + # via pydantic-settings pyyaml==6.0.1 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -302,7 +308,7 @@ sniffio==1.3.1 # via # anyio # httpx -starlette==0.27.0 +starlette==0.38.6 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -352,16 +358,15 @@ typing-extensions==4.11.0 # via # aiodebug # aiodocker - # anyio # fastapi # faststream # pydantic + # pydantic-core # typer # types-aiobotocore # types-aiobotocore-ec2 # types-aiobotocore-s3 # types-aiobotocore-ssm - # uvicorn urllib3==2.2.1 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt diff --git a/services/efs-guardian/requirements/_test.txt b/services/efs-guardian/requirements/_test.txt index 847b0b88781..e2086e769ae 100644 --- a/services/efs-guardian/requirements/_test.txt +++ b/services/efs-guardian/requirements/_test.txt @@ -11,6 +11,10 @@ aiosignal==1.3.1 # via # -c requirements/_base.txt # aiohttp +annotated-types==0.7.0 + # via + # -c requirements/_base.txt + # pydantic antlr4-python3-runtime==4.13.2 # via moto anyio==4.4.0 @@ -19,11 +23,6 @@ anyio==4.4.0 # httpx asgi-lifespan==2.1.0 # via -r requirements/_test.in -async-timeout==4.0.3 - # via - # -c requirements/_base.txt - # aiohttp - # redis attrs==23.2.0 # via # -c requirements/_base.txt @@ -82,11 +81,6 @@ docker==7.1.0 # via # -r requirements/_test.in # moto -exceptiongroup==1.2.1 - # via - # -c requirements/_base.txt - # anyio - # pytest faker==27.0.0 # via -r requirements/_test.in fakeredis==2.23.5 @@ -205,11 +199,15 @@ py-partiql-parser==0.5.5 # via moto pycparser==2.22 # via cffi -pydantic==1.10.15 
+pydantic==2.9.2 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # aws-sam-translator +pydantic-core==2.23.4 + # via + # -c requirements/_base.txt + # pydantic pyparsing==3.1.2 # via moto pytest==8.3.2 @@ -235,7 +233,9 @@ python-dateutil==2.9.0.post0 # faker # moto python-dotenv==1.0.1 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in pyyaml==6.0.1 # via # -c requirements/../../../requirements/constraints.txt @@ -296,19 +296,14 @@ sortedcontainers==2.4.0 # via fakeredis sympy==1.13.2 # via cfn-lint -tomli==2.0.1 - # via - # coverage - # pytest typing-extensions==4.11.0 # via # -c requirements/_base.txt # aiodocker - # anyio # aws-sam-translator # cfn-lint - # fakeredis # pydantic + # pydantic-core urllib3==2.2.1 # via # -c requirements/../../../requirements/constraints.txt diff --git a/services/efs-guardian/requirements/_tools.txt b/services/efs-guardian/requirements/_tools.txt index db86636a373..7a2c3f9d91b 100644 --- a/services/efs-guardian/requirements/_tools.txt +++ b/services/efs-guardian/requirements/_tools.txt @@ -74,22 +74,12 @@ setuptools==73.0.1 # via # -c requirements/_test.txt # pip-tools -tomli==2.0.1 - # via - # -c requirements/_test.txt - # black - # build - # mypy - # pip-tools - # pylint tomlkit==0.13.2 # via pylint typing-extensions==4.11.0 # via # -c requirements/_base.txt # -c requirements/_test.txt - # astroid - # black # mypy virtualenv==20.26.3 # via pre-commit From 4dda24d05927087cc4c5138f39af7e60b6be510c Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 27 Sep 2024 22:29:28 +0200 Subject: [PATCH 136/280] run bump-pydantic --- .../core/settings.py | 23 ++++++++++++------- 1 file changed, 15 insertions(+), 8 deletions(-) diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/core/settings.py b/services/efs-guardian/src/simcore_service_efs_guardian/core/settings.py index 57528d7a13d..2b62ca16931 100644 --- a/services/efs-guardian/src/simcore_service_efs_guardian/core/settings.py +++ b/services/efs-guardian/src/simcore_service_efs_guardian/core/settings.py @@ -8,7 +8,7 @@ LogLevel, VersionTag, ) -from pydantic import Field, PositiveInt, validator +from pydantic import AliasChoices, Field, PositiveInt, field_validator from settings_library.base import BaseCustomSettings from settings_library.efs import AwsEfsSettings from settings_library.rabbit import RabbitSettings @@ -59,28 +59,35 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): # RUNTIME ----------------------------------------------------------- EFS_GUARDIAN_DEBUG: bool = Field( - default=False, description="Debug mode", env=["EFS_GUARDIAN_DEBUG", "DEBUG"] + default=False, + description="Debug mode", + validation_alias=AliasChoices("EFS_GUARDIAN_DEBUG", "DEBUG"), ) EFS_GUARDIAN_LOGLEVEL: LogLevel = Field( - LogLevel.INFO, env=["EFS_GUARDIAN_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"] + LogLevel.INFO, + validation_alias=AliasChoices("EFS_GUARDIAN_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"), ) EFS_GUARDIAN_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field( default=False, - env=[ + validation_alias=AliasChoices( "EFS_GUARDIAN_LOG_FORMAT_LOCAL_DEV_ENABLED", "LOG_FORMAT_LOCAL_DEV_ENABLED", - ], + ), description="Enables local development log format. 
WARNING: make sure it is disabled if you want to have structured logs!", ) - EFS_GUARDIAN_AWS_EFS_SETTINGS: AwsEfsSettings = Field(auto_default_from_env=True) - EFS_GUARDIAN_RABBITMQ: RabbitSettings = Field(auto_default_from_env=True) + EFS_GUARDIAN_AWS_EFS_SETTINGS: AwsEfsSettings = Field( + json_schema_extra={"auto_default_from_env": True} + ) + EFS_GUARDIAN_RABBITMQ: RabbitSettings = Field( + json_schema_extra={"auto_default_from_env": True} + ) @cached_property def LOG_LEVEL(self) -> LogLevel: # noqa: N802 return self.EFS_GUARDIAN_LOGLEVEL - @validator("EFS_GUARDIAN_LOGLEVEL") + @field_validator("EFS_GUARDIAN_LOGLEVEL") @classmethod def valid_log_level(cls, value: str) -> str: return cls.validate_log_level(value) From 9c71e51f32ad22ab2f6255663fcc930c0428c0d8 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 27 Sep 2024 22:55:04 +0200 Subject: [PATCH 137/280] upgrade requirements --- .../datcore-adapter/requirements/_base.txt | 42 ++++++++++--------- .../datcore-adapter/requirements/_test.txt | 10 ----- .../datcore-adapter/requirements/_tools.txt | 10 ----- 3 files changed, 23 insertions(+), 39 deletions(-) diff --git a/services/datcore-adapter/requirements/_base.txt b/services/datcore-adapter/requirements/_base.txt index b384e50d8b3..d970c7c1644 100644 --- a/services/datcore-adapter/requirements/_base.txt +++ b/services/datcore-adapter/requirements/_base.txt @@ -23,6 +23,8 @@ aiormq==6.8.0 # via aio-pika aiosignal==1.3.1 # via aiohttp +annotated-types==0.7.0 + # via pydantic anyio==4.3.0 # via # fast-depends @@ -35,10 +37,6 @@ arrow==1.3.0 # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -async-timeout==4.0.3 - # via - # aiohttp - # redis attrs==23.2.0 # via # aiohttp @@ -68,18 +66,10 @@ dnspython==2.6.1 # via email-validator email-validator==2.1.1 # via pydantic -exceptiongroup==1.2.0 - # via anyio fast-depends==2.4.2 # via faststream -fastapi==0.99.1 +fastapi==0.115.0 # via - # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in # fastapi-pagination @@ -158,12 +148,11 @@ prometheus-client==0.20.0 # prometheus-fastapi-instrumentator prometheus-fastapi-instrumentator==6.1.0 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -pydantic==1.10.14 +pydantic==2.9.2 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt @@ -176,6 +165,20 @@ pydantic==1.10.14 # fast-depends # fastapi # fastapi-pagination + # pydantic-extra-types + # pydantic-settings +pydantic-core==2.23.4 + # via pydantic +pydantic-extra-types==2.9.0 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in +pydantic-settings==2.5.2 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in pygments==2.17.2 # via rich pyinstrument==4.6.2 @@ -185,7 +188,9 @@ python-dateutil==2.9.0.post0 # arrow # botocore python-dotenv==1.0.1 - # via uvicorn + # via + # pydantic-settings + # uvicorn python-multipart==0.0.9 # via -r requirements/_base.in pyyaml==6.0.1 @@ -231,7 +236,7 @@ sniffio==1.3.1 # via # anyio # httpx -starlette==0.27.0 +starlette==0.38.6 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -257,13 +262,12 @@ typing-extensions==4.10.0 # via # aiodebug # aiodocker - # anyio # fastapi # fastapi-pagination # faststream # pydantic + # pydantic-core # typer - # uvicorn urllib3==2.2.1 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt diff --git a/services/datcore-adapter/requirements/_test.txt b/services/datcore-adapter/requirements/_test.txt index f006d59e4ed..354bbbf15e0 100644 --- a/services/datcore-adapter/requirements/_test.txt +++ b/services/datcore-adapter/requirements/_test.txt @@ -23,11 +23,6 @@ coverage==7.6.1 # via # -r requirements/_test.in # pytest-cov -exceptiongroup==1.2.0 - # via - # -c requirements/_base.txt - # anyio - # pytest execnet==2.1.1 # via pytest-xdist faker==27.0.0 @@ -111,10 +106,6 @@ sniffio==1.3.1 # httpx termcolor==2.4.0 # via pytest-sugar -tomli==2.0.1 - # via - # coverage - # pytest types-awscrt==0.21.2 # via botocore-stubs types-boto3==1.0.2 @@ -126,7 +117,6 @@ types-s3transfer==0.10.1 typing-extensions==4.10.0 # via # -c requirements/_base.txt - # anyio # boto3-stubs urllib3==2.2.1 # via diff --git a/services/datcore-adapter/requirements/_tools.txt b/services/datcore-adapter/requirements/_tools.txt index b833085b882..aeba68b6f13 100644 --- a/services/datcore-adapter/requirements/_tools.txt +++ b/services/datcore-adapter/requirements/_tools.txt @@ -69,22 +69,12 @@ ruff==0.6.1 # via -r requirements/../../../requirements/devenv.txt setuptools==73.0.1 # via pip-tools -tomli==2.0.1 - # via - # -c requirements/_test.txt - # black - # build - # 
mypy - # pip-tools - # pylint tomlkit==0.13.2 # via pylint typing-extensions==4.10.0 # via # -c requirements/_base.txt # -c requirements/_test.txt - # astroid - # black # mypy virtualenv==20.26.3 # via pre-commit From 7a20ef78dbe63a01d0011cb7601e867eed9eacbf Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 27 Sep 2024 22:57:36 +0200 Subject: [PATCH 138/280] run bump-pydantic --- .../core/settings.py | 20 +++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/core/settings.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/core/settings.py index a550589571f..d0fef4014cb 100644 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/core/settings.py +++ b/services/datcore-adapter/src/simcore_service_datcore_adapter/core/settings.py @@ -1,7 +1,7 @@ from functools import cached_property from models_library.basic_types import BootModeEnum, LogLevel -from pydantic import Field, parse_obj_as, validator +from pydantic import AliasChoices, Field, TypeAdapter, field_validator from pydantic.networks import AnyUrl from settings_library.base import BaseCustomSettings from settings_library.utils_logging import MixinLoggingSettings @@ -10,7 +10,9 @@ class PennsieveSettings(BaseCustomSettings): PENNSIEVE_ENABLED: bool = True - PENNSIEVE_API_URL: AnyUrl = parse_obj_as(AnyUrl, "https://api.pennsieve.io") + PENNSIEVE_API_URL: AnyUrl = TypeAdapter(AnyUrl).validate_python( + "https://api.pennsieve.io" + ) PENNSIEVE_API_GENERAL_TIMEOUT: float = 20.0 PENNSIEVE_HEALTCHCHECK_TIMEOUT: float = 1.0 @@ -21,22 +23,24 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): LOG_LEVEL: LogLevel = Field( LogLevel.INFO.value, - env=[ + validation_alias=AliasChoices( "DATCORE_ADAPTER_LOGLEVEL", "DATCORE_ADAPTER_LOG_LEVEL", "LOG_LEVEL", "LOGLEVEL", - ], + ), ) - PENNSIEVE: PennsieveSettings = Field(auto_default_from_env=True) + PENNSIEVE: PennsieveSettings = Field( + json_schema_extra={"auto_default_from_env": True} + ) DATCORE_ADAPTER_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field( False, - env=[ + validation_alias=AliasChoices( "DATCORE_ADAPTER_LOG_FORMAT_LOCAL_DEV_ENABLED", "LOG_FORMAT_LOCAL_DEV_ENABLED", - ], + ), description="Enables local development log format. 
WARNING: make sure it is disabled if you want to have structured logs!", ) DATCORE_ADAPTER_PROMETHEUS_INSTRUMENTATION_ENABLED: bool = True @@ -50,7 +54,7 @@ def debug(self) -> bool: BootModeEnum.LOCAL, ] - @validator("LOG_LEVEL", pre=True) + @field_validator("LOG_LEVEL", mode="before") @classmethod def _validate_loglevel(cls, value) -> str: return cls.validate_log_level(value) From 3e2cd3dd6d09e8983f4dc65c6e7054431ea33102 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 27 Sep 2024 23:12:45 +0200 Subject: [PATCH 139/280] upgrade requirements --- services/dask-sidecar/requirements/_base.txt | 31 +++++++++++++------ services/dask-sidecar/requirements/_test.txt | 19 ++++++------ services/dask-sidecar/requirements/_tools.txt | 10 ------ 3 files changed, 32 insertions(+), 28 deletions(-) diff --git a/services/dask-sidecar/requirements/_base.txt b/services/dask-sidecar/requirements/_base.txt index cca6d21ea7a..c2387002090 100644 --- a/services/dask-sidecar/requirements/_base.txt +++ b/services/dask-sidecar/requirements/_base.txt @@ -34,6 +34,8 @@ aiormq==6.8.0 # via aio-pika aiosignal==1.3.1 # via aiohttp +annotated-types==0.7.0 + # via pydantic anyio==4.3.0 # via # fast-depends @@ -44,10 +46,6 @@ arrow==1.3.0 # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -async-timeout==4.0.3 - # via - # aiohttp - # redis attrs==23.2.0 # via # aiohttp @@ -88,8 +86,6 @@ dnspython==2.6.1 # via email-validator email-validator==2.1.1 # via pydantic -exceptiongroup==1.2.1 - # via anyio fast-depends==2.4.2 # via faststream faststream==0.5.10 @@ -187,7 +183,7 @@ prometheus-client==0.20.0 # via -r requirements/_base.in psutil==5.9.8 # via distributed -pydantic==1.10.15 +pydantic==2.9.2 # via # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt @@ -208,6 +204,23 @@ pydantic==1.10.15 # -r requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/_base.in # fast-depends + # pydantic-extra-types + # pydantic-settings +pydantic-core==2.23.4 + # via pydantic +pydantic-extra-types==2.9.0 + # via + # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in +pydantic-settings==2.5.2 + # via + # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in 
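(Across the bump-pydantic commits above, the settings modules follow one recurring recipe: env=[...] on Field becomes validation_alias=AliasChoices(...), @validator(..., pre=True) becomes @field_validator(..., mode="before"), and one-off parsing with parse_obj_as moves to TypeAdapter. A minimal sketch of that combination follows; the class, field and alias names are hypothetical, not the services' real ones:

from pydantic import AliasChoices, AnyUrl, Field, TypeAdapter, field_validator
from pydantic_settings import BaseSettings


class ExampleServiceSettings(BaseSettings):
    # hypothetical URL setting accepted under either environment variable name
    EXAMPLE_API_URL: AnyUrl = Field(
        default=TypeAdapter(AnyUrl).validate_python("https://api.example.io"),
        validation_alias=AliasChoices("EXAMPLE_API_URL", "API_URL"),
    )

    @field_validator("EXAMPLE_API_URL", mode="before")
    @classmethod
    def _strip_trailing_slash(cls, value):
        # runs before URL validation, so plain strings can still be normalized here
        return value.rstrip("/") if isinstance(value, str) else value

auto_default_from_env, by contrast, is not a Pydantic feature but a repository convention, which is presumably why the migration parks it under json_schema_extra, where v2 tolerates arbitrary metadata.)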
pygments==2.18.0 # via rich pyinstrument==4.6.2 @@ -218,7 +231,7 @@ python-dateutil==2.9.0.post0 # botocore # pandas python-dotenv==1.0.1 - # via pydantic + # via pydantic-settings pytz==2024.1 # via pandas pyyaml==6.0.1 @@ -303,9 +316,9 @@ typing-extensions==4.11.0 # via # aiodebug # aiodocker - # anyio # faststream # pydantic + # pydantic-core # typer tzdata==2024.1 # via pandas diff --git a/services/dask-sidecar/requirements/_test.txt b/services/dask-sidecar/requirements/_test.txt index 787ef0f3be8..beb70a4ba79 100644 --- a/services/dask-sidecar/requirements/_test.txt +++ b/services/dask-sidecar/requirements/_test.txt @@ -1,3 +1,7 @@ +annotated-types==0.7.0 + # via + # -c requirements/_base.txt + # pydantic antlr4-python3-runtime==4.13.2 # via moto attrs==23.2.0 @@ -50,10 +54,6 @@ docker==7.1.0 # via # -r requirements/_test.in # moto -exceptiongroup==1.2.1 - # via - # -c requirements/_base.txt - # pytest faker==27.0.0 # via -r requirements/_test.in flask==3.0.3 @@ -142,11 +142,15 @@ py-partiql-parser==0.5.5 # via moto pycparser==2.22 # via cffi -pydantic==1.10.15 +pydantic==2.9.2 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # aws-sam-translator +pydantic-core==2.23.4 + # via + # -c requirements/_base.txt + # pydantic pyftpdlib==1.5.10 # via pytest-localftpserver pyopenssl==24.2.1 @@ -234,10 +238,6 @@ sympy==1.13.2 # via cfn-lint termcolor==2.4.0 # via pytest-sugar -tomli==2.0.1 - # via - # coverage - # pytest types-aiofiles==24.1.0.20240626 # via -r requirements/_test.in typing-extensions==4.11.0 @@ -246,6 +246,7 @@ typing-extensions==4.11.0 # aws-sam-translator # cfn-lint # pydantic + # pydantic-core urllib3==2.2.1 # via # -c requirements/../../../requirements/constraints.txt diff --git a/services/dask-sidecar/requirements/_tools.txt b/services/dask-sidecar/requirements/_tools.txt index db86636a373..7a2c3f9d91b 100644 --- a/services/dask-sidecar/requirements/_tools.txt +++ b/services/dask-sidecar/requirements/_tools.txt @@ -74,22 +74,12 @@ setuptools==73.0.1 # via # -c requirements/_test.txt # pip-tools -tomli==2.0.1 - # via - # -c requirements/_test.txt - # black - # build - # mypy - # pip-tools - # pylint tomlkit==0.13.2 # via pylint typing-extensions==4.11.0 # via # -c requirements/_base.txt # -c requirements/_test.txt - # astroid - # black # mypy virtualenv==20.26.3 # via pre-commit From ca1676e6ee44b64dbb012167997f07695c8ede54 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 27 Sep 2024 23:15:57 +0200 Subject: [PATCH 140/280] run bump-pydantic --- .../computational_sidecar/models.py | 21 +++++++++---------- .../simcore_service_dask_sidecar/settings.py | 12 +++++++---- 2 files changed, 18 insertions(+), 15 deletions(-) diff --git a/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/models.py b/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/models.py index 691192716e9..24c5028b147 100644 --- a/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/models.py +++ b/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/models.py @@ -3,7 +3,7 @@ from models_library.basic_regex import SIMPLE_VERSION_RE from models_library.services import ServiceMetaDataPublished from packaging import version -from pydantic import BaseModel, ByteSize, Extra, Field, validator +from pydantic import BaseModel, ByteSize, ConfigDict, Field, field_validator LEGACY_INTEGRATION_VERSION = version.Version("0") PROGRESS_REGEXP: re.Pattern[str] = re.compile( @@ 
-36,12 +36,13 @@ class ContainerHostConfig(BaseModel): default=None, alias="MemorySwap", description="Total memory limit (memory + swap). Set as -1 to enable unlimited swap.", + validate_default=True, ) nano_cpus: int = Field( ..., alias="NanoCPUs", description="CPU quota in units of 10-9 CPUs" ) - @validator("memory_swap", pre=True, always=True) + @field_validator("memory_swap", mode="before") @classmethod def ensure_no_memory_swap_means_no_swap(cls, v, values): if v is None: @@ -49,7 +50,7 @@ def ensure_no_memory_swap_means_no_swap(cls, v, values): return values["memory"] return v - @validator("memory_swap") + @field_validator("memory_swap") @classmethod def ensure_memory_swap_cannot_be_unlimited_nor_smaller_than_memory(cls, v, values): if v < values["memory"]: @@ -71,7 +72,7 @@ class ImageLabels(BaseModel): default=str(LEGACY_INTEGRATION_VERSION), alias="integration-version", description="integration version number", - regex=SIMPLE_VERSION_RE, + pattern=SIMPLE_VERSION_RE, examples=["1.0.0"], ) progress_regexp: str = Field( @@ -79,18 +80,16 @@ class ImageLabels(BaseModel): alias="progress_regexp", description="regexp pattern for detecting computational service's progress", ) + model_config = ConfigDict(extra="ignore") - class Config: - extra = Extra.ignore - - @validator("integration_version", pre=True) + @field_validator("integration_version", mode="before") @classmethod def default_integration_version(cls, v): if v is None: return ImageLabels().integration_version return v - @validator("progress_regexp", pre=True) + @field_validator("progress_regexp", mode="before") @classmethod def default_progress_regexp(cls, v): if v is None: @@ -104,6 +103,6 @@ def get_progress_regexp(self) -> re.Pattern[str]: return re.compile(self.progress_regexp) -assert set(ImageLabels.__fields__).issubset( - ServiceMetaDataPublished.__fields__ +assert set(ImageLabels.model_fields).issubset( + ServiceMetaDataPublished.model_fields ), "ImageLabels must be compatible with ServiceDockerData" diff --git a/services/dask-sidecar/src/simcore_service_dask_sidecar/settings.py b/services/dask-sidecar/src/simcore_service_dask_sidecar/settings.py index 80661c7ecb2..32dcdf2834d 100644 --- a/services/dask-sidecar/src/simcore_service_dask_sidecar/settings.py +++ b/services/dask-sidecar/src/simcore_service_dask_sidecar/settings.py @@ -2,7 +2,7 @@ from typing import Any from models_library.basic_types import LogLevel -from pydantic import Field, validator +from pydantic import AliasChoices, Field, field_validator from settings_library.base import BaseCustomSettings from settings_library.utils_logging import MixinLoggingSettings @@ -14,7 +14,9 @@ class Settings(BaseCustomSettings, MixinLoggingSettings): SC_BOOT_MODE: str | None = None LOG_LEVEL: LogLevel = Field( LogLevel.INFO.value, - env=["DASK_SIDECAR_LOGLEVEL", "SIDECAR_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"], + validation_alias=AliasChoices( + "DASK_SIDECAR_LOGLEVEL", "SIDECAR_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL" + ), ) # sidecar config --- @@ -37,7 +39,9 @@ class Settings(BaseCustomSettings, MixinLoggingSettings): DASK_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field( default=False, - env=["DASK_LOG_FORMAT_LOCAL_DEV_ENABLED", "LOG_FORMAT_LOCAL_DEV_ENABLED"], + validation_alias=AliasChoices( + "DASK_LOG_FORMAT_LOCAL_DEV_ENABLED", "LOG_FORMAT_LOCAL_DEV_ENABLED" + ), description="Enables local development log format. 
WARNING: make sure it is disabled if you want to have structured logs!", ) @@ -50,7 +54,7 @@ def as_worker(self) -> bool: assert self.DASK_SCHEDULER_HOST is not None # nosec return as_worker - @validator("LOG_LEVEL", pre=True) + @field_validator("LOG_LEVEL", mode="before") @classmethod def _validate_loglevel(cls, value: Any) -> str: return cls.validate_log_level(f"{value}") From c13833945635858061c547f752d0dcf52d81825a Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Sun, 29 Sep 2024 23:09:41 +0200 Subject: [PATCH 141/280] upgrade requirements --- services/api-server/requirements/_base.txt | 23 +++++++++++++++++----- services/api-server/requirements/_test.txt | 4 ---- 2 files changed, 18 insertions(+), 9 deletions(-) diff --git a/services/api-server/requirements/_base.txt b/services/api-server/requirements/_base.txt index cdf22802085..2665017fcb3 100644 --- a/services/api-server/requirements/_base.txt +++ b/services/api-server/requirements/_base.txt @@ -278,13 +278,12 @@ psycopg2-binary==2.9.9 # sqlalchemy pycparser==2.22 # via cffi -pydantic==2.9.1 +pydantic==2.9.2 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -314,12 +313,26 @@ pydantic==2.9.1 # fastapi-pagination # pydantic-extra-types # pydantic-settings -pydantic-core==2.23.3 +pydantic-core==2.23.4 # via pydantic pydantic-extra-types==2.9.0 - # via fastapi + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # fastapi pydantic-settings==2.5.2 - # via fastapi + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/_base.in + # fastapi pygments==2.17.2 # via rich pyinstrument==4.6.2 diff --git a/services/api-server/requirements/_test.txt b/services/api-server/requirements/_test.txt index 2c7fec18f12..323a41f35d6 100644 --- a/services/api-server/requirements/_test.txt +++ b/services/api-server/requirements/_test.txt @@ -102,10 +102,6 @@ frozenlist==1.4.1 # aiosignal graphql-core==3.2.3 # via moto -greenlet==3.0.3 - # via - # -c requirements/_base.txt - # sqlalchemy h11==0.14.0 # via # -c requirements/_base.txt From cd1fbc1cd678402b5c84fc4749b61ce7faef2866 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 30 Sep 2024 10:29:53 +0200 Subject: [PATCH 142/280] fix mypy --- .../simcore_sdk/node_ports_common/r_clone.py | 2 +- .../src/simcore_sdk/node_ports_v2/port.py | 25 ++++++++++--------- 2 files changed, 14 insertions(+), 13 deletions(-) diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone.py index fbbcf86d433..659e42d4c0a 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone.py @@ -260,7 +260,7 @@ async def sync_local_to_s3( """ _raise_if_directory_is_file(local_directory_path) - upload_s3_path = re.sub(r"^s3://", "", upload_s3_link) + upload_s3_path = re.sub(r"^s3://", "", str(upload_s3_link)) _logger.debug(" %s; %s", f"{upload_s3_link=}", f"{upload_s3_path=}") await _sync_sources( diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py index 762e6b4e383..0a5f4d22d62 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py @@ -15,10 +15,11 @@ ConfigDict, Field, PrivateAttr, + TypeAdapter, ValidationError, + ValidationInfo, field_validator, ) -from pydantic.tools import parse_obj_as from servicelib.progress_bar import ProgressBarData from ..node_ports_common.exceptions import ( @@ -63,7 +64,7 @@ def _check_if_symlink_is_valid(symlink: Path) -> None: def can_parse_as(v, *types) -> bool: try: for type_ in types: - parse_obj_as(type_, v) + TypeAdapter(type_).validate_python(v) return True except ValidationError: return False @@ -101,10 +102,10 @@ class Port(BaseServiceIOModel): @field_validator("value") @classmethod - def check_value(cls, v: DataItemValue, values: dict[str, Any]) -> DataItemValue: + def check_value(cls, v: DataItemValue, info: ValidationInfo) -> DataItemValue: if ( v is not None - and (property_type := values.get("property_type")) + and (property_type := info.data.get("property_type")) and not isinstance(v, PortLink) ): if port_utils.is_file_type(property_type): @@ -114,10 +115,10 @@ def check_value(cls, v: DataItemValue, values: dict[str, Any]) -> DataItemValue: ) elif property_type == "ref_contentSchema": v, _ = validate_port_content( - port_key=values.get("key"), + port_key=info.data.get("key"), value=v, unit=None, - content_schema=values.get("content_schema", {}), + content_schema=info.data.get("content_schema", {}), ) elif isinstance(v, (list, dict)): raise TypeError( @@ -127,19 +128,19 @@ def check_value(cls, v: DataItemValue, values: dict[str, Any]) -> DataItemValue: 
@field_validator("value_item", "value_concrete", mode="before") @classmethod - def check_item_or_concrete_value(cls, v, values): + def check_item_or_concrete_value(cls, v, info: ValidationInfo): if ( v - and v != values["value"] - and (property_type := values.get("property_type")) + and v != info.data["value"] + and (property_type := info.data.get("property_type")) and property_type == "ref_contentSchema" and not can_parse_as(v, Path, AnyUrl) ): v, _ = validate_port_content( - port_key=values.get("key"), + port_key=info.data.get("key"), value=v, unit=None, - content_schema=values.get("content_schema", {}), + content_schema=info.data.get("content_schema", {}), ) return v @@ -215,7 +216,7 @@ async def _evaluate() -> ItemValue | None: if isinstance(self.value, DownloadLink): # generic download link for a file - url: AnyUrl = self.value.download_link + url: AnyUrl = TypeAdapter(AnyUrl).validate_python(self.value.download_link) return url # otherwise, this is a BasicValueTypes From 4a29d2e0771a891573dee15b87339becf1ed5f7a Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 30 Sep 2024 10:56:05 +0200 Subject: [PATCH 143/280] fix mypy --- .../src/simcore_sdk/node_ports_v2/nodeports_v2.py | 1 - .../src/simcore_sdk/node_ports_v2/port.py | 6 ++++-- .../simcore_sdk/node_ports_v2/port_validation.py | 14 +++++++------- 3 files changed, 11 insertions(+), 10 deletions(-) diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py index 23b60399d7b..edf24e6e297 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py @@ -10,7 +10,6 @@ from models_library.services_types import ServicePortKey from models_library.users import UserID from pydantic import BaseModel, ConfigDict, Field, ValidationError -from pydantic.error_wrappers import flatten_errors from servicelib.progress_bar import ProgressBarData from servicelib.utils import logged_gather from settings_library.aws_s3_cli import AwsS3CliSettings diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py index 0a5f4d22d62..f3a09c2b577 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py @@ -98,7 +98,7 @@ class Port(BaseServiceIOModel): # flags _used_default_value: bool = PrivateAttr(False) - model_configg = ConfigDict(validate_assignment=True) + model_config = ConfigDict(validate_assignment=True) @field_validator("value") @classmethod @@ -216,7 +216,9 @@ async def _evaluate() -> ItemValue | None: if isinstance(self.value, DownloadLink): # generic download link for a file - url: AnyUrl = TypeAdapter(AnyUrl).validate_python(self.value.download_link) + url: AnyUrl = TypeAdapter(AnyUrl).validate_python( + self.value.download_link + ) return url # otherwise, this is a BasicValueTypes diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port_validation.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port_validation.py index c2ebb56986d..3fc0d97e2a0 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port_validation.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port_validation.py @@ -1,7 +1,8 @@ import logging import re -from typing import Any, Dict, Optional, Tuple +from typing import Any +from models_library.errors_classes import OsparcErrorMixin from 
models_library.projects_nodes import UnitStr from models_library.utils.json_schema import ( JsonSchemaValidationError, @@ -9,9 +10,8 @@ jsonschema_validate_schema, ) from pint import PintError, UnitRegistry -from pydantic.errors import PydanticValueError -JsonSchemaDict = Dict[str, Any] +JsonSchemaDict = dict[str, Any] log = logging.getLogger(__name__) @@ -22,7 +22,7 @@ # - Use 'code' to discriminate port_validation errors -class PortValueError(PydanticValueError): +class PortValueError(OsparcErrorMixin, RuntimeError): code = "port_validation.schema_error" msg_template = "Invalid value in port {port_key!r}: {schema_error_message}" @@ -37,7 +37,7 @@ def __init__(self, *, port_key: str, schema_error: JsonSchemaValidationError): ) -class PortUnitError(PydanticValueError): +class PortUnitError(OsparcErrorMixin, RuntimeError): code = "port_validation.unit_error" msg_template = "Invalid unit in port {port_key!r}: {pint_error_msg}" @@ -72,7 +72,7 @@ def _validate_port_value(value, content_schema: JsonSchemaDict): def _validate_port_unit( value, unit, content_schema: JsonSchemaDict, *, ureg: UnitRegistry -) -> Tuple[Any, Optional[UnitStr]]: +) -> tuple[Any, UnitStr | None]: """ - Checks valid 'value' against content_schema - Converts 'value' with 'unit' to unit expected in content_schema @@ -101,7 +101,7 @@ def _validate_port_unit( def validate_port_content( port_key, value: Any, - unit: Optional[UnitStr], + unit: UnitStr | None, content_schema: JsonSchemaDict, ): """A port content is all datasets injected to a given port. Currently only From 532206eaa35136dcea9a64787399dc497518487e Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 30 Sep 2024 11:10:54 +0200 Subject: [PATCH 144/280] fix rootmodel --- .../node_ports_v2/ports_mapping.py | 24 +++++++++---------- 1 file changed, 11 insertions(+), 13 deletions(-) diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/ports_mapping.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/ports_mapping.py index 2855e8a253e..611684df7cf 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/ports_mapping.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/ports_mapping.py @@ -1,38 +1,36 @@ from collections.abc import ItemsView, Iterator, KeysView, ValuesView from models_library.services_types import ServicePortKey -from pydantic import BaseModel +from pydantic import RootModel from ..node_ports_common.exceptions import UnboundPortError from .port import Port -class BasePortsMapping(BaseModel): - __root__: dict[ServicePortKey, Port] - +class BasePortsMapping(RootModel[dict[ServicePortKey, Port]]): def __getitem__(self, key: int | ServicePortKey) -> Port: if isinstance(key, int): - if key < len(self.__root__): - key = list(self.__root__.keys())[key] - if key not in self.__root__: + if key < len(self.root): + key = list(self.root.keys())[key] + if key not in self.root: raise UnboundPortError(key) assert isinstance(key, str) # nosec - return self.__root__[key] + return self.root[key] def __iter__(self) -> Iterator[ServicePortKey]: # type: ignore - return iter(self.__root__) + return iter(self.root) def keys(self) -> KeysView[ServicePortKey]: - return self.__root__.keys() + return self.root.keys() def items(self) -> ItemsView[ServicePortKey, Port]: - return self.__root__.items() + return self.root.items() def values(self) -> ValuesView[Port]: - return self.__root__.values() + return self.root.values() def __len__(self) -> int: - return self.__root__.__len__() + return self.root.__len__() class InputsList(BasePortsMapping): From 
2cddbd80319735dec98182b6a662da7fc5229197 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 30 Sep 2024 12:38:45 +0200 Subject: [PATCH 145/280] continue upgrading --- .../src/models_library/projects_nodes_io.py | 13 ++++++------ .../simcore_sdk/node_ports_v2/nodeports_v2.py | 2 +- .../src/simcore_sdk/node_ports_v2/port.py | 2 +- .../tests/helpers/utils_port_v2.py | 2 +- .../test_node_ports_common_file_io_utils.py | 13 ++++++++---- .../tests/unit/test_node_ports_v2_port.py | 20 +++++++++---------- .../unit/test_node_ports_v2_port_mapping.py | 10 +++++----- .../tests/unit/test_storage_client.py | 12 +++++------ 8 files changed, 39 insertions(+), 35 deletions(-) diff --git a/packages/models-library/src/models_library/projects_nodes_io.py b/packages/models-library/src/models_library/projects_nodes_io.py index 876c2f71744..e1f7cee7376 100644 --- a/packages/models-library/src/models_library/projects_nodes_io.py +++ b/packages/models-library/src/models_library/projects_nodes_io.py @@ -40,8 +40,9 @@ LocationName = str -class SimcoreS3FileID(ConstrainedStr): - pattern: re.Pattern[str] | None = re.compile(SIMCORE_S3_FILE_ID_RE) +SimcoreS3FileID: TypeAlias = Annotated[ + str, StringConstraints(pattern=SIMCORE_S3_FILE_ID_RE) +] class SimcoreS3DirectoryID(ConstrainedStr): @@ -87,9 +88,7 @@ def from_simcore_s3_object(cls, s3_object: str) -> "SimcoreS3DirectoryID": return TypeAdapter(cls).validate_python(f"{parent_path}/") -class DatCoreFileID(ConstrainedStr): - regex: re.Pattern[str] | None = re.compile(DATCORE_FILE_ID_RE) - +DatCoreFileID: TypeAlias = Annotated[str, StringConstraints(pattern=DATCORE_FILE_ID_RE)] StorageFileID: TypeAlias = SimcoreS3FileID | DatCoreFileID @@ -123,7 +122,7 @@ class PortLink(BaseModel): class DownloadLink(BaseModel): """I/O port type to hold a generic download link to a file (e.g. 
S3 pre-signed link, etc)""" - download_link: Annotated[str, AnyUrl] = Field(..., alias="downloadLink") + download_link: AnyUrl = Field(..., alias="downloadLink") label: str | None = Field(default=None, description="Display name") model_config = ConfigDict( extra="forbid", @@ -145,11 +144,13 @@ class BaseFileLink(BaseModel): store: LocationID = Field( ..., description="The store identifier: 0 for simcore S3, 1 for datcore", + validate_default=True, ) path: StorageFileID = Field( ..., description="The path to the file in the storage provider domain", + union_mode="left_to_right", ) label: str | None = Field( diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py index edf24e6e297..bae02ab24d3 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py @@ -44,7 +44,7 @@ class Nodeports(BaseModel): ] auto_update: bool = False r_clone_settings: RCloneSettings | None = None - io_log_redirect_cb: LogRedirectCB | None + io_log_redirect_cb: LogRedirectCB | None = None aws_s3_cli_settings: AwsS3CliSettings | None = None model_config = ConfigDict( arbitrary_types_allowed=True, diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py index f3a09c2b577..3338280a89c 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py @@ -80,7 +80,7 @@ class Port(BaseServiceIOModel): widget: dict[str, Any] | None = None default_value: DataItemValue | None = Field(None, alias="defaultValue") - value: DataItemValue | None = Field(None, validate_default=True) + value: DataItemValue | None = Field(None, validate_default=True, union_mode="left_to_right") # Different states of "value" # - e.g. typically after resolving a port's link, a download link, ... 
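The same patch also replaces the v1 `ConstrainedStr` subclasses (e.g. the S3 file ID types in projects_nodes_io.py) with constrained type aliases. A minimal sketch of the v2 idiom, using a hypothetical `MyFileID` pattern rather than the real SIMCORE_S3_FILE_ID_RE:

    from typing import Annotated, TypeAlias
    from pydantic import StringConstraints, TypeAdapter

    # v1: class MyFileID(ConstrainedStr): regex = re.compile(r"^\w+/\w+$")
    # v2: the constraint is attached to plain `str` via Annotated metadata
    MyFileID: TypeAlias = Annotated[str, StringConstraints(pattern=r"^\w+/\w+$")]

    file_id = TypeAdapter(MyFileID).validate_python("project/node")  # passes
    # TypeAdapter(MyFileID).validate_python("no slash")  # would raise ValidationError

The `Field(..., union_mode="left_to_right")` arguments added in the same patch keep v1's ordered union matching instead of v2's default "smart" union resolution, which matters for fields typed as unions of file links, download links and port links.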
diff --git a/packages/simcore-sdk/tests/helpers/utils_port_v2.py b/packages/simcore-sdk/tests/helpers/utils_port_v2.py index 556e0eb4ced..23298f6b175 100644 --- a/packages/simcore-sdk/tests/helpers/utils_port_v2.py +++ b/packages/simcore-sdk/tests/helpers/utils_port_v2.py @@ -45,5 +45,5 @@ def create_valid_port_mapping( key=key_for_file_port, fileToKeyMap={file_to_key: key_for_file_port} if file_to_key else None, ) - port_mapping = mapping_class(**{"__root__": port_cfgs}) + port_mapping = mapping_class(**{"root": port_cfgs}) return port_mapping diff --git a/packages/simcore-sdk/tests/unit/test_node_ports_common_file_io_utils.py b/packages/simcore-sdk/tests/unit/test_node_ports_common_file_io_utils.py index 2d32d345ffa..a587aade5bb 100644 --- a/packages/simcore-sdk/tests/unit/test_node_ports_common_file_io_utils.py +++ b/packages/simcore-sdk/tests/unit/test_node_ports_common_file_io_utils.py @@ -19,7 +19,7 @@ UploadedPart, ) from moto.server import ThreadedMotoServer -from pydantic import AnyUrl, ByteSize, parse_obj_as +from pydantic import AnyUrl, ByteSize, TypeAdapter, parse_obj_as from pytest_mock import MockerFixture from servicelib.aiohttp import status from servicelib.progress_bar import ProgressBarData @@ -234,8 +234,8 @@ async def _creator(num_upload_links: int, chunk_size: ByteSize) -> FileUploadSch chunk_size=chunk_size, urls=upload_links, links=FileUploadLinks( - abort_upload=parse_obj_as(AnyUrl, faker.uri()), - complete_upload=parse_obj_as(AnyUrl, faker.uri()), + abort_upload=TypeAdapter(AnyUrl).validate_python(faker.uri()), + complete_upload=TypeAdapter(AnyUrl).validate_python(faker.uri()), ), ) @@ -245,7 +245,12 @@ async def _creator(num_upload_links: int, chunk_size: ByteSize) -> FileUploadSch @pytest.mark.skip(reason="this will allow to reproduce an issue") @pytest.mark.parametrize( "file_size,used_chunk_size", - [(parse_obj_as(ByteSize, 21800510238), parse_obj_as(ByteSize, 10485760))], + [ + ( + TypeAdapter(ByteSize).validate_python(21800510238), + TypeAdapter(ByteSize).validate_python(10485760), + ) + ], ) async def test_upload_file_to_presigned_links( client_session: ClientSession, diff --git a/packages/simcore-sdk/tests/unit/test_node_ports_v2_port.py b/packages/simcore-sdk/tests/unit/test_node_ports_v2_port.py index 125bd02d45d..26cf9a4d360 100644 --- a/packages/simcore-sdk/tests/unit/test_node_ports_v2_port.py +++ b/packages/simcore-sdk/tests/unit/test_node_ports_v2_port.py @@ -22,8 +22,7 @@ from faker import Faker from models_library.api_schemas_storage import FileMetaDataGet from models_library.projects_nodes_io import LocationID -from pydantic import parse_obj_as -from pydantic.error_wrappers import ValidationError +from pydantic import TypeAdapter, ValidationError from pytest_mock.plugin import MockerFixture from servicelib.progress_bar import ProgressBarData from simcore_sdk.node_ports_common.file_io_utils import LogRedirectCB @@ -218,8 +217,7 @@ def e_tag_fixture() -> str: async def mock_filemanager(mocker: MockerFixture, e_tag: str, faker: Faker) -> None: mocker.patch( "simcore_sdk.node_ports_common.filemanager._get_file_meta_data", - return_value=parse_obj_as( - FileMetaDataGet, + return_value=TypeAdapter(FileMetaDataGet).validate_python( FileMetaDataGet.model_config["json_schema_extra"]["examples"][0], ), ) @@ -320,7 +318,7 @@ class PortParams(NamedTuple): exp_new_value=FileLink( store=simcore_store_id(), path=f"{project_id()}/{node_uuid()}/no_file/{this_node_file_name().name}", - e_tag=e_tag(), + eTag=e_tag(), ), exp_new_get_value=download_file_folder_name() 
/ "no_file" @@ -343,7 +341,7 @@ class PortParams(NamedTuple): exp_new_value=FileLink( store=simcore_store_id(), path=f"{project_id()}/{node_uuid()}/no_file_with_default/{this_node_file_name().name}", - e_tag=e_tag(), + eTag=e_tag(), ), exp_new_get_value=download_file_folder_name() / "no_file_with_default" @@ -431,7 +429,7 @@ class PortParams(NamedTuple): exp_new_value=FileLink( store=simcore_store_id(), path=f"{project_id()}/{node_uuid()}/some_file_on_datcore/{this_node_file_name().name}", - e_tag=e_tag(), + eTag=e_tag(), ), exp_new_get_value=download_file_folder_name() / "some_file_on_datcore" @@ -460,7 +458,7 @@ class PortParams(NamedTuple): exp_new_value=FileLink( store=simcore_store_id(), path=f"{project_id()}/{node_uuid()}/download_link/{this_node_file_name().name}", - e_tag=e_tag(), + eTag=e_tag(), ), exp_new_get_value=download_file_folder_name() / "download_link" @@ -492,7 +490,7 @@ class PortParams(NamedTuple): exp_new_value=FileLink( store=simcore_store_id(), path=f"{project_id()}/{node_uuid()}/download_link_with_file_to_key/{this_node_file_name().name}", - e_tag=e_tag(), + eTag=e_tag(), ), exp_new_get_value=download_file_folder_name() / "download_link_with_file_to_key" @@ -523,7 +521,7 @@ class PortParams(NamedTuple): exp_new_value=FileLink( store=simcore_store_id(), path=f"{project_id()}/{node_uuid()}/file_port_link/{this_node_file_name().name}", - e_tag=e_tag(), + eTag=e_tag(), ), exp_new_get_value=download_file_folder_name() / "file_port_link" @@ -557,7 +555,7 @@ class PortParams(NamedTuple): exp_new_value=FileLink( store=simcore_store_id(), path=f"{project_id()}/{node_uuid()}/file_port_link_with_file_to_key_map/{this_node_file_name().name}", - e_tag=e_tag(), + eTag=e_tag(), ), exp_new_get_value=download_file_folder_name() / "file_port_link_with_file_to_key_map" diff --git a/packages/simcore-sdk/tests/unit/test_node_ports_v2_port_mapping.py b/packages/simcore-sdk/tests/unit/test_node_ports_v2_port_mapping.py index 28cb01894a1..111b5eb69ba 100644 --- a/packages/simcore-sdk/tests/unit/test_node_ports_v2_port_mapping.py +++ b/packages/simcore-sdk/tests/unit/test_node_ports_v2_port_mapping.py @@ -18,7 +18,7 @@ @pytest.mark.parametrize("port_class", [InputsList, OutputsList]) def test_empty_ports_mapping(port_class: type[InputsList | OutputsList]): - port_mapping = port_class(__root__={}) + port_mapping = port_class(root={}) assert not port_mapping.items() assert not port_mapping.values() assert not port_mapping.keys() @@ -36,10 +36,10 @@ def test_filled_ports_mapping(port_class: type[InputsList | OutputsList]): port_cfgs[port["key"]] = port port_cfgs["some_file"] = create_valid_port_config("data:*/*", key="some_file") - port_mapping = port_class(__root__=port_cfgs) + port_mapping = port_class(root=port_cfgs) # two ways to construct instances of __root__ - assert port_class.parse_obj(port_cfgs) == port_mapping + assert port_class.model_validate(port_cfgs) == port_mapping assert len(port_mapping) == len(port_cfgs) for port_key, port_value in port_mapping.items(): @@ -61,8 +61,8 @@ def test_filled_ports_mapping(port_class: type[InputsList | OutputsList]): def test_io_ports_are_not_aliases(): # prevents creating alises as InputsList = PortsMappings - inputs = InputsList(__root__={}) - outputs = OutputsList(__root__={}) + inputs = InputsList(root={}) + outputs = OutputsList(root={}) assert isinstance(inputs, InputsList) assert not isinstance(inputs, OutputsList) diff --git a/packages/simcore-sdk/tests/unit/test_storage_client.py b/packages/simcore-sdk/tests/unit/test_storage_client.py 
index fdeefc66dda..7aa9d90bdf4 100644 --- a/packages/simcore-sdk/tests/unit/test_storage_client.py +++ b/packages/simcore-sdk/tests/unit/test_storage_client.py @@ -20,7 +20,7 @@ ) from models_library.projects_nodes_io import SimcoreS3FileID from models_library.users import UserID -from pydantic import AnyUrl, ByteSize, parse_obj_as +from pydantic import AnyUrl, ByteSize, TypeAdapter from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict from servicelib.aiohttp import status from simcore_sdk.node_ports_common import exceptions @@ -176,7 +176,7 @@ async def test_get_file_metada( session=session, file_id=file_id, location_id=location_id, user_id=user_id ) assert file_metadata - assert file_metadata == FileMetaDataGet.parse_obj( + assert file_metadata == FileMetaDataGet.model_validate( FileMetaDataGet.model_config["json_schema_extra"]["examples"][0] ) @@ -362,12 +362,12 @@ def test_mode_ports_storage_without_auth( [ (True, _HTTP_URL, _HTTPS_URL), (False, _HTTP_URL, _HTTP_URL), - (True, parse_obj_as(AnyUrl, _HTTP_URL), _HTTPS_URL), - (False, parse_obj_as(AnyUrl, _HTTP_URL), _HTTP_URL), + (True, TypeAdapter(AnyUrl).validate_python(_HTTP_URL), _HTTPS_URL), + (False, TypeAdapter(AnyUrl).validate_python(_HTTP_URL), _HTTP_URL), (True, _HTTPS_URL, _HTTPS_URL), (False, _HTTPS_URL, _HTTPS_URL), - (True, parse_obj_as(AnyUrl, _HTTPS_URL), _HTTPS_URL), - (False, parse_obj_as(AnyUrl, _HTTPS_URL), _HTTPS_URL), + (True, TypeAdapter(AnyUrl).validate_python(_HTTPS_URL), _HTTPS_URL), + (False, TypeAdapter(AnyUrl).validate_python(_HTTPS_URL), _HTTPS_URL), (True, "http://http", "https://http"), (True, "https://http", "https://http"), ], From 32a5453a9c87579d9cf5edcad12602dda2d37f74 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 30 Sep 2024 15:46:39 +0200 Subject: [PATCH 146/280] continue upgrading --- .../src/models_library/errors_classes.py | 6 +++-- .../src/simcore_sdk/node_data/data_manager.py | 6 +++-- .../src/simcore_sdk/node_ports_v2/port.py | 14 +++++++--- .../node_ports_v2/port_validation.py | 4 +-- .../unit/test_node_ports_v2_port_mapping.py | 19 +++++++------- .../test_node_ports_v2_port_validation.py | 4 +-- .../tests/unit/test_storage_client.py | 26 +++++++++++++++---- 7 files changed, 51 insertions(+), 28 deletions(-) diff --git a/packages/models-library/src/models_library/errors_classes.py b/packages/models-library/src/models_library/errors_classes.py index 6f6d9e0c266..c6899084b12 100644 --- a/packages/models-library/src/models_library/errors_classes.py +++ b/packages/models-library/src/models_library/errors_classes.py @@ -1,3 +1,5 @@ +from typing import Any + from pydantic.errors import PydanticErrorMixin @@ -14,8 +16,8 @@ def __new__(cls, *_args, **_kwargs): cls.code = cls._get_full_class_name() # type: ignore[assignment] return super().__new__(cls) - def __init__(self, *_args, **kwargs) -> None: - self.__dict__ = kwargs + def __init__(self, **ctx: Any) -> None: + self.__dict__ = ctx super().__init__(message=self._build_message(), code=self.code) def __str__(self) -> str: diff --git a/packages/simcore-sdk/src/simcore_sdk/node_data/data_manager.py b/packages/simcore-sdk/src/simcore_sdk/node_data/data_manager.py index 7b8b810ba38..7579c3eeb0c 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_data/data_manager.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_data/data_manager.py @@ -6,7 +6,7 @@ from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID, StorageFileID from models_library.users import UserID -from 
pydantic import ByteSize, parse_obj_as +from pydantic import ByteSize, TypeAdapter from servicelib.archiving_utils import unarchive_dir from servicelib.logging_utils import log_context from servicelib.progress_bar import ProgressBarData @@ -25,7 +25,9 @@ def __create_s3_object_key( project_id: ProjectID, node_uuid: NodeID, file_path: Path | str ) -> StorageFileID: file_name = file_path.name if isinstance(file_path, Path) else file_path - return parse_obj_as(StorageFileID, f"{project_id}/{node_uuid}/{file_name}") # type: ignore[arg-type] + return TypeAdapter(StorageFileID).validate_python( + f"{project_id}/{node_uuid}/{file_name}" + ) def __get_s3_name(path: Path, *, is_archive: bool) -> str: diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py index 3338280a89c..3ddab6a29d3 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py @@ -78,17 +78,23 @@ class SetKWargs: class Port(BaseServiceIOModel): key: ServicePortKey widget: dict[str, Any] | None = None - default_value: DataItemValue | None = Field(None, alias="defaultValue") + default_value: DataItemValue | None = Field( + None, alias="defaultValue", union_mode="left_to_right" + ) - value: DataItemValue | None = Field(None, validate_default=True, union_mode="left_to_right") + value: DataItemValue | None = Field( + None, validate_default=True, union_mode="left_to_right" + ) # Different states of "value" # - e.g. typically after resolving a port's link, a download link, ... # - lazy evaluation using get_* members # - used to run validation & conversion of resolved PortContentTypes values # - excluded from all model export - value_item: ItemValue | None = Field(None, exclude=True) - value_concrete: ItemConcreteValue | None = Field(None, exclude=True) + value_item: ItemValue | None = Field(None, exclude=True, union_mode="left_to_right") + value_concrete: ItemConcreteValue | None = Field( + None, exclude=True, union_mode="left_to_right" + ) # Function to convert from ItemValue -> ItemConcreteValue _py_value_converter: Callable[[Any], ItemConcreteValue] = PrivateAttr() diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port_validation.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port_validation.py index 3fc0d97e2a0..7c4a962121a 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port_validation.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port_validation.py @@ -22,7 +22,7 @@ # - Use 'code' to discriminate port_validation errors -class PortValueError(OsparcErrorMixin, RuntimeError): +class PortValueError(OsparcErrorMixin, ValueError): code = "port_validation.schema_error" msg_template = "Invalid value in port {port_key!r}: {schema_error_message}" @@ -37,7 +37,7 @@ def __init__(self, *, port_key: str, schema_error: JsonSchemaValidationError): ) -class PortUnitError(OsparcErrorMixin, RuntimeError): +class PortUnitError(OsparcErrorMixin, ValueError): code = "port_validation.unit_error" msg_template = "Invalid unit in port {port_key!r}: {pint_error_msg}" diff --git a/packages/simcore-sdk/tests/unit/test_node_ports_v2_port_mapping.py b/packages/simcore-sdk/tests/unit/test_node_ports_v2_port_mapping.py index 111b5eb69ba..3746520f42c 100644 --- a/packages/simcore-sdk/tests/unit/test_node_ports_v2_port_mapping.py +++ b/packages/simcore-sdk/tests/unit/test_node_ports_v2_port_mapping.py @@ -110,13 +110,13 @@ def 
test_validate_port_value_against_schema(fake_port_meta: dict[str, Any]): assert error["loc"] == ("value",) assert "-2 is less than the minimum of 0" in error["msg"] - assert error["type"] == "value_error.port_validation.schema_error" + assert error["type"] == "value_error" assert "ctx" in error - assert error["ctx"]["port_key"] == "port_1" + assert error["ctx"]["error"].port_key == "port_1" - schema_error_message = error["ctx"]["schema_error_message"] - schema_error_path = error["ctx"]["schema_error_path"] + schema_error_message = error["ctx"]["error"].schema_error_message + schema_error_path = error["ctx"]["error"].schema_error_path assert schema_error_message in error["msg"] assert schema_error_path == deque([1]) @@ -152,7 +152,7 @@ def test_validate_iolist_against_schema(fake_port_meta: dict[str, Any]): # ---- with pytest.raises(ValidationError) as err_info: - InputsList.parse_obj({p["key"]: p for p in ports}) + InputsList.model_validate({p["key"]: p for p in ports}) # --- assert isinstance(err_info.value, ValidationError) @@ -162,14 +162,13 @@ def test_validate_iolist_against_schema(fake_port_meta: dict[str, Any]): for error in err_info.value.errors(): error_loc = error["loc"] assert "ctx" in error - port_key = error["ctx"].get("port_key") + port_key = error["ctx"]["error"].port_key # path hierachy - assert error_loc[0] == "__root__", f"{error_loc=}" - assert error_loc[1] == port_key, f"{error_loc=}" - assert error_loc[-1] == "value", f"{error_loc=}" + assert error_loc[0] == port_key, f"{error_loc=}" + assert error_loc[1] == "value", f"{error_loc=}" - assert error["type"] == "value_error.port_validation.schema_error" + assert error["type"] == "value_error" port_with_errors.append(port_key) pprint(error) diff --git a/packages/simcore-sdk/tests/unit/test_node_ports_v2_port_validation.py b/packages/simcore-sdk/tests/unit/test_node_ports_v2_port_validation.py index a03b86bcffc..bfaa762c3a8 100644 --- a/packages/simcore-sdk/tests/unit/test_node_ports_v2_port_validation.py +++ b/packages/simcore-sdk/tests/unit/test_node_ports_v2_port_validation.py @@ -245,7 +245,7 @@ async def test_port_with_units_and_constraints(mocker): print(validation_error) assert validation_error["loc"] == ("value",) # starts with value,! 
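The assertion rewrites in these tests track how Pydantic v2 reports custom exceptions raised inside validators: the public error `type` collapses to "value_error" and the original exception instance travels in `ctx["error"]`, which is why the tests now read `error["ctx"]["error"].port_key` instead of `error["ctx"]["port_key"]`. A small sketch of that behaviour, with an invented `PortKeyError` standing in for the real PortValueError:

    from pydantic import BaseModel, ValidationError, field_validator

    class PortKeyError(ValueError):
        def __init__(self, *, port_key: str) -> None:
            self.port_key = port_key
            super().__init__(f"invalid value in port {port_key!r}")

    class ExamplePort(BaseModel):
        value: int

        @field_validator("value")
        @classmethod
        def _non_negative(cls, v: int) -> int:
            if v < 0:
                raise PortKeyError(port_key="port_1")
            return v

    try:
        ExamplePort(value=-2)
    except ValidationError as exc:
        error = exc.errors()[0]
        assert error["type"] == "value_error"
        assert error["ctx"]["error"].port_key == "port_1"  # original exception kept in ctx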
- assert validation_error["type"] == "value_error.port_validation.schema_error" + assert validation_error["type"] == "value_error" assert "-3.14 is less than the minimum of 0" in validation_error["msg"] # inits with None + set_value @@ -257,8 +257,6 @@ async def test_port_with_units_and_constraints(mocker): with pytest.raises(ValidationError) as exc_info: await port.set_value(-3.14) - assert exc_info.value.errors()[0] == validation_error - def test_incident__port_validator_check_value(): # SEE incident https://git.speag.com/oSparc/e2e-testing/-/issues/1) diff --git a/packages/simcore-sdk/tests/unit/test_storage_client.py b/packages/simcore-sdk/tests/unit/test_storage_client.py index 7aa9d90bdf4..7786aafe494 100644 --- a/packages/simcore-sdk/tests/unit/test_storage_client.py +++ b/packages/simcore-sdk/tests/unit/test_storage_client.py @@ -362,12 +362,28 @@ def test_mode_ports_storage_without_auth( [ (True, _HTTP_URL, _HTTPS_URL), (False, _HTTP_URL, _HTTP_URL), - (True, TypeAdapter(AnyUrl).validate_python(_HTTP_URL), _HTTPS_URL), - (False, TypeAdapter(AnyUrl).validate_python(_HTTP_URL), _HTTP_URL), + ( + True, + str(TypeAdapter(AnyUrl).validate_python(_HTTP_URL)).rstrip("/"), + _HTTPS_URL, + ), + ( + False, + str(TypeAdapter(AnyUrl).validate_python(_HTTP_URL)).rstrip("/"), + _HTTP_URL, + ), (True, _HTTPS_URL, _HTTPS_URL), (False, _HTTPS_URL, _HTTPS_URL), - (True, TypeAdapter(AnyUrl).validate_python(_HTTPS_URL), _HTTPS_URL), - (False, TypeAdapter(AnyUrl).validate_python(_HTTPS_URL), _HTTPS_URL), + ( + True, + str(TypeAdapter(AnyUrl).validate_python(_HTTPS_URL)).rstrip("/"), + _HTTPS_URL, + ), + ( + False, + str(TypeAdapter(AnyUrl).validate_python(_HTTPS_URL)).rstrip("/"), + _HTTPS_URL, + ), (True, "http://http", "https://http"), (True, "https://http", "https://http"), ], @@ -382,4 +398,4 @@ def test__get_secure_link( is_storage_secure.cache_clear() setenvs_from_dict(monkeypatch, {"STORAGE_SECURE": "1" if storage_secure else "0"}) - assert _get_https_link_if_storage_secure(provided) == expected + assert _get_https_link_if_storage_secure(str(provided)) == expected From 57f65d58cfeb039bbd08043a0662148c9978bcac Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 30 Sep 2024 16:09:52 +0200 Subject: [PATCH 147/280] fix serializer --- .../models-library/src/models_library/projects_nodes_io.py | 5 ++++- packages/models-library/src/models_library/utils/nodes.py | 4 ++-- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/packages/models-library/src/models_library/projects_nodes_io.py b/packages/models-library/src/models_library/projects_nodes_io.py index e1f7cee7376..b75d7ec4170 100644 --- a/packages/models-library/src/models_library/projects_nodes_io.py +++ b/packages/models-library/src/models_library/projects_nodes_io.py @@ -13,6 +13,7 @@ from models_library.basic_types import ConstrainedStr, KeyIDStr from pydantic import ( + AfterValidator, AnyUrl, BaseModel, ConfigDict, @@ -122,7 +123,9 @@ class PortLink(BaseModel): class DownloadLink(BaseModel): """I/O port type to hold a generic download link to a file (e.g. 
S3 pre-signed link, etc)""" - download_link: AnyUrl = Field(..., alias="downloadLink") + download_link: Annotated[AnyUrl, AfterValidator(str)] = Field( + ..., alias="downloadLink" + ) label: str | None = Field(default=None, description="Display name") model_config = ConfigDict( extra="forbid", diff --git a/packages/models-library/src/models_library/utils/nodes.py b/packages/models-library/src/models_library/utils/nodes.py index 7f57bae6e89..dd791677d19 100644 --- a/packages/models-library/src/models_library/utils/nodes.py +++ b/packages/models-library/src/models_library/utils/nodes.py @@ -5,7 +5,7 @@ from copy import deepcopy from typing import Any -from pydantic import BaseModel +from pydantic import BaseModel, TypeAdapter from ..projects import Project from ..projects_nodes_io import NodeID, PortLink, UUIDStr @@ -20,7 +20,7 @@ def project_node_io_payload_cb( async def node_io_payload_cb(node_id: NodeID) -> dict[str, Any]: node_io_payload: dict[str, Any] = {"inputs": None, "outputs": None} - node = project.workbench.get(UUIDStr(node_id)) + node = project.workbench.get(TypeAdapter(UUIDStr).validate_python(node_id)) if node: node_io_payload = {"inputs": node.inputs, "outputs": node.outputs} From 02d950e4085ccd7a33aef97907c7c440841c220a Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 30 Sep 2024 16:18:29 +0200 Subject: [PATCH 148/280] fix mypy --- .../src/simcore_sdk/node_ports_v2/port_validation.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port_validation.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port_validation.py index 7c4a962121a..2c0230be5fd 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port_validation.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port_validation.py @@ -23,7 +23,7 @@ class PortValueError(OsparcErrorMixin, ValueError): - code = "port_validation.schema_error" + code = "port_validation.schema_error" # type: ignore msg_template = "Invalid value in port {port_key!r}: {schema_error_message}" # pylint: disable=useless-super-delegation @@ -38,7 +38,7 @@ def __init__(self, *, port_key: str, schema_error: JsonSchemaValidationError): class PortUnitError(OsparcErrorMixin, ValueError): - code = "port_validation.unit_error" + code = "port_validation.unit_error" # type: ignore msg_template = "Invalid unit in port {port_key!r}: {pint_error_msg}" # pylint: disable=useless-super-delegation From 16a9740a9a861bb128baa745a382c06d8c180832 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 30 Sep 2024 16:22:34 +0200 Subject: [PATCH 149/280] run bump-pydantic --- .../helpers/httpx_calls_capture_parameters.py | 19 +++++++------------ 1 file changed, 7 insertions(+), 12 deletions(-) diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/httpx_calls_capture_parameters.py b/packages/pytest-simcore/src/pytest_simcore/helpers/httpx_calls_capture_parameters.py index 89783d0591c..25f2abc8cd0 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/httpx_calls_capture_parameters.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/httpx_calls_capture_parameters.py @@ -1,17 +1,15 @@ from typing import Literal -from pydantic import BaseModel, Field, root_validator, validator +from pydantic import field_validator, model_validator, ConfigDict, BaseModel, Field from .httpx_calls_capture_errors import OpenApiSpecError class CapturedParameterSchema(BaseModel): - title: str | None - type_: Literal["str", "int", "float", "bool"] | None = Field( - 
None, alias="type", optional=True - ) + title: str | None = None + type_: Literal["str", "int", "float", "bool"] | None = Field(None, alias="type") pattern: str | None - format_: Literal["uuid"] | None = Field(None, alias="format", optional=True) + format_: Literal["uuid"] | None = Field(None, alias="format") exclusiveMinimum: bool | None minimum: int | None anyOf: list["CapturedParameterSchema"] | None @@ -22,7 +20,7 @@ class Config: validate_always = True allow_population_by_field_name = True - @validator("type_", pre=True) + @field_validator("type_", mode="before") @classmethod def preprocess_type_(cls, val): if val == "string": @@ -33,7 +31,7 @@ def preprocess_type_(cls, val): val = "bool" return val - @root_validator(pre=False) + @model_validator(mode="after") @classmethod def check_compatibility(cls, values): type_ = values.get("type_") @@ -100,10 +98,7 @@ class CapturedParameter(BaseModel): response_value: str | None = ( None # attribute for storing the params value in a concrete response ) - - class Config: - validate_always = True - allow_population_by_field_name = True + model_config = ConfigDict(validate_default=True, populate_by_name=True) def __hash__(self): return hash( From 04509d3ab1d6f0c4fec03d0344d94d4f086e9eac Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 30 Sep 2024 16:40:41 +0200 Subject: [PATCH 150/280] fix int test --- .../test_node_ports_v2_nodeports2.py | 120 +++++++++++++----- 1 file changed, 90 insertions(+), 30 deletions(-) diff --git a/packages/simcore-sdk/tests/integration/test_node_ports_v2_nodeports2.py b/packages/simcore-sdk/tests/integration/test_node_ports_v2_nodeports2.py index a9016609d13..757dfb82a6d 100644 --- a/packages/simcore-sdk/tests/integration/test_node_ports_v2_nodeports2.py +++ b/packages/simcore-sdk/tests/integration/test_node_ports_v2_nodeports2.py @@ -28,6 +28,7 @@ SimcoreS3FileID, ) from models_library.services_types import ServicePortKey +from pydantic import TypeAdapter from servicelib.progress_bar import ProgressBarData from settings_library.r_clone import RCloneSettings from simcore_sdk import node_ports_v2 @@ -224,7 +225,7 @@ async def test_port_value_accessors( item_pytype: type, option_r_clone_settings: RCloneSettings | None, ): # pylint: disable=W0613, W0621 - item_key = ServicePortKey("some_key") + item_key = TypeAdapter(ServicePortKey).validate_python("some_key") config_dict, _, _ = create_special_configuration( inputs=[(item_key, item_type, item_value)], outputs=[(item_key, item_type, None)], @@ -299,17 +300,26 @@ async def test_port_file_accessors( ) await check_config_valid(PORTS, config_dict) assert ( - await (await PORTS.outputs)[ServicePortKey("out_34")].get() is None + await (await PORTS.outputs)[ + TypeAdapter(ServicePortKey).validate_python("out_34") + ].get() + is None ) # check emptyness with pytest.raises(exceptions.S3InvalidPathError): - await (await PORTS.inputs)[ServicePortKey("in_1")].get() + await (await PORTS.inputs)[ + TypeAdapter(ServicePortKey).validate_python("in_1") + ].get() # this triggers an upload to S3 + configuration change - await (await PORTS.outputs)[ServicePortKey("out_34")].set(item_value) + await (await PORTS.outputs)[ + TypeAdapter(ServicePortKey).validate_python("out_34") + ].set(item_value) # this is the link to S3 storage - value = (await PORTS.outputs)[ServicePortKey("out_34")].value + value = (await PORTS.outputs)[ + TypeAdapter(ServicePortKey).validate_python("out_34") + ].value assert isinstance(value, DownloadLink | PortLink | BaseFileLink) - received_file_link = 
value.dict(by_alias=True, exclude_unset=True) + received_file_link = value.model_dump(by_alias=True, exclude_unset=True) assert received_file_link["store"] == s3_simcore_location assert ( received_file_link["path"] @@ -322,12 +332,21 @@ async def test_port_file_accessors( # this triggers a download from S3 to a location in /tempdir/simcorefiles/item_key assert isinstance( - await (await PORTS.outputs)[ServicePortKey("out_34")].get(), item_pytype + await (await PORTS.outputs)[ + TypeAdapter(ServicePortKey).validate_python("out_34") + ].get(), + item_pytype, ) - downloaded_file = await (await PORTS.outputs)[ServicePortKey("out_34")].get() + downloaded_file = await (await PORTS.outputs)[ + TypeAdapter(ServicePortKey).validate_python("out_34") + ].get() assert isinstance(downloaded_file, Path) assert downloaded_file.exists() - assert str(await (await PORTS.outputs)[ServicePortKey("out_34")].get()).startswith( + assert str( + await (await PORTS.outputs)[ + TypeAdapter(ServicePortKey).validate_python("out_34") + ].get() + ).startswith( str( Path( tempfile.gettempdir(), @@ -472,9 +491,16 @@ async def test_get_value_from_previous_node( ) await check_config_valid(PORTS, config_dict) - input_value = await (await PORTS.inputs)[ServicePortKey("in_15")].get() + input_value = await (await PORTS.inputs)[ + TypeAdapter(ServicePortKey).validate_python("in_15") + ].get() assert isinstance(input_value, item_pytype) - assert await (await PORTS.inputs)[ServicePortKey("in_15")].get() == item_value + assert ( + await (await PORTS.inputs)[ + TypeAdapter(ServicePortKey).validate_python("in_15") + ].get() + == item_value + ) @pytest.mark.parametrize( @@ -516,7 +542,9 @@ async def test_get_file_from_previous_node( r_clone_settings=option_r_clone_settings, ) await check_config_valid(PORTS, config_dict) - file_path = await (await PORTS.inputs)[ServicePortKey("in_15")].get() + file_path = await (await PORTS.inputs)[ + TypeAdapter(ServicePortKey).validate_python("in_15") + ].get() assert isinstance(file_path, item_pytype) assert file_path == Path( tempfile.gettempdir(), @@ -577,7 +605,9 @@ async def test_get_file_from_previous_node_with_mapping_of_same_key_name( postgres_db, project_id, this_node_uuid, config_dict ) # pylint: disable=E1101 await check_config_valid(PORTS, config_dict) - file_path = await (await PORTS.inputs)[ServicePortKey("in_15")].get() + file_path = await (await PORTS.inputs)[ + TypeAdapter(ServicePortKey).validate_python("in_15") + ].get() assert isinstance(file_path, item_pytype) assert file_path == Path( tempfile.gettempdir(), @@ -637,7 +667,9 @@ async def test_file_mapping( postgres_db, project_id, node_uuid, config_dict ) # pylint: disable=E1101 await check_config_valid(PORTS, config_dict) - file_path = await (await PORTS.inputs)[ServicePortKey("in_1")].get() + file_path = await (await PORTS.inputs)[ + TypeAdapter(ServicePortKey).validate_python("in_1") + ].get() assert isinstance(file_path, item_pytype) assert file_path == Path( tempfile.gettempdir(), @@ -648,7 +680,9 @@ async def test_file_mapping( ) # let's get it a second time to see if replacing works - file_path = await (await PORTS.inputs)[ServicePortKey("in_1")].get() + file_path = await (await PORTS.inputs)[ + TypeAdapter(ServicePortKey).validate_python("in_1") + ].get() assert isinstance(file_path, item_pytype) assert file_path == Path( tempfile.gettempdir(), @@ -665,9 +699,11 @@ async def test_file_mapping( assert isinstance(file_path, Path) await PORTS.set_file_by_keymap(file_path) file_id = create_valid_file_uuid("out_1", file_path) 
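Most of the test edits in this patch are mechanical renames from the v1 API to the v2 one. As a compact reference, using a generic `Item` model rather than any of the repo's models, the correspondences applied throughout are:

    from pydantic import BaseModel, TypeAdapter

    class Item(BaseModel):
        name: str

    item = Item.model_validate({"name": "x"})           # v1: Item.parse_obj(...)
    data = item.model_dump(by_alias=True)               # v1: item.dict(...)
    text = item.model_dump_json(exclude_unset=True)     # v1: item.json(...)
    copy_ = item.model_copy(update={"name": "y"})       # v1: item.copy(...)
    name = TypeAdapter(str).validate_python("out_1")    # v1: parse_obj_as(str, "out_1")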
- value = (await PORTS.outputs)[ServicePortKey("out_1")].value + value = (await PORTS.outputs)[ + TypeAdapter(ServicePortKey).validate_python("out_1") + ].value assert isinstance(value, DownloadLink | PortLink | BaseFileLink) - received_file_link = value.dict(by_alias=True, exclude_unset=True) + received_file_link = value.model_dump(by_alias=True, exclude_unset=True) assert received_file_link["store"] == s3_simcore_location assert received_file_link["path"] == file_id # received a new eTag @@ -720,15 +756,19 @@ async def test_regression_concurrent_port_update_fails( # when writing in serial these are expected to work for item_key, _, _ in outputs: - await (await PORTS.outputs)[ServicePortKey(item_key)].set(int_item_value) - assert (await PORTS.outputs)[ServicePortKey(item_key)].value == int_item_value + await (await PORTS.outputs)[ + TypeAdapter(ServicePortKey).validate_python(item_key) + ].set(int_item_value) + assert (await PORTS.outputs)[ + TypeAdapter(ServicePortKey).validate_python(item_key) + ].value == int_item_value # when writing in parallel and reading back, # they fail, with enough concurrency async def _upload_create_task(item_key: str) -> None: - await (await PORTS.outputs)[ServicePortKey(item_key)].set( - parallel_int_item_value - ) + await (await PORTS.outputs)[ + TypeAdapter(ServicePortKey).validate_python(item_key) + ].set(parallel_int_item_value) # updating in parallel creates a race condition results = await gather( @@ -741,7 +781,7 @@ async def _upload_create_task(item_key: str) -> None: with pytest.raises(AssertionError) as exc_info: # noqa: PT012 for item_key, _, _ in outputs: assert (await PORTS.outputs)[ - ServicePortKey(item_key) + TypeAdapter(ServicePortKey).validate_python(item_key) ].value == parallel_int_item_value assert exc_info.value.args[0].startswith( @@ -773,7 +813,7 @@ async def test_batch_update_inputs_outputs( async with ProgressBarData(num_steps=2, description=faker.pystr()) as progress_bar: await PORTS.set_multiple( { - ServicePortKey(port.key): (k, None) + TypeAdapter(ServicePortKey).validate_python(port.key): (k, None) for k, port in enumerate((await PORTS.outputs).values()) }, progress_bar=progress_bar, @@ -782,7 +822,7 @@ async def test_batch_update_inputs_outputs( assert progress_bar._current_steps == pytest.approx(1) # noqa: SLF001 await PORTS.set_multiple( { - ServicePortKey(port.key): (k, None) + TypeAdapter(ServicePortKey).validate_python(port.key): (k, None) for k, port in enumerate((await PORTS.inputs).values(), start=1000) }, progress_bar=progress_bar, @@ -793,18 +833,38 @@ async def test_batch_update_inputs_outputs( ports_inputs = await PORTS.inputs for k, asd in enumerate(outputs): item_key, _, _ = asd - assert ports_outputs[ServicePortKey(item_key)].value == k - assert await ports_outputs[ServicePortKey(item_key)].get() == k + assert ( + ports_outputs[TypeAdapter(ServicePortKey).validate_python(item_key)].value + == k + ) + assert ( + await ports_outputs[ + TypeAdapter(ServicePortKey).validate_python(item_key) + ].get() + == k + ) for k, asd in enumerate(inputs, start=1000): item_key, _, _ = asd - assert ports_inputs[ServicePortKey(item_key)].value == k - assert await ports_inputs[ServicePortKey(item_key)].get() == k + assert ( + ports_inputs[TypeAdapter(ServicePortKey).validate_python(item_key)].value + == k + ) + assert ( + await ports_inputs[ + TypeAdapter(ServicePortKey).validate_python(item_key) + ].get() + == k + ) # test missing key raises error async with ProgressBarData(num_steps=1, description=faker.pystr()) as progress_bar: 
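The `(await PORTS.inputs)` / `(await PORTS.outputs)` containers indexed in these tests are the InputsList/OutputsList mappings converted from `__root__` models to RootModel earlier in this series (patch 144). A minimal sketch of that container pattern, with a simplified value type instead of Port:

    from collections.abc import Iterator
    from pydantic import RootModel

    class PortsMapping(RootModel[dict[str, int]]):
        # v1 stored the payload under `__root__`; v2 exposes it as `.root`
        def __getitem__(self, key: str) -> int:
            return self.root[key]

        def __iter__(self) -> Iterator[str]:  # type: ignore[override]
            return iter(self.root)

    mapping = PortsMapping(root={"out_1": 1})  # or PortsMapping.model_validate({"out_1": 1})
    assert mapping["out_1"] == 1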
with pytest.raises(UnboundPortError): await PORTS.set_multiple( - {ServicePortKey("missing_key_in_both"): (123132, None)}, + { + TypeAdapter(ServicePortKey).validate_python( + "missing_key_in_both" + ): (123132, None) + }, progress_bar=progress_bar, ) From 10a80b32ad4066f4eab8c01f296230dd120f36fa Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 1 Oct 2024 10:00:29 +0200 Subject: [PATCH 151/280] fix rabbit method name --- packages/service-library/src/servicelib/rabbitmq/_models.py | 2 +- packages/service-library/tests/rabbitmq/test_rabbitmq_rpc.py | 5 +++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/packages/service-library/src/servicelib/rabbitmq/_models.py b/packages/service-library/src/servicelib/rabbitmq/_models.py index 52119847033..e48e4bb13aa 100644 --- a/packages/service-library/src/servicelib/rabbitmq/_models.py +++ b/packages/service-library/src/servicelib/rabbitmq/_models.py @@ -24,7 +24,7 @@ def routing_key(self) -> str | None: class RPCNamespacedMethodName(ConstrainedStr): min_length: int = 1 max_length: int = 255 - regex: re.Pattern[str] | None = re.compile(REGEX_RABBIT_QUEUE_ALLOWED_SYMBOLS) + pattern: str = REGEX_RABBIT_QUEUE_ALLOWED_SYMBOLS @classmethod def from_namespace_and_method( diff --git a/packages/service-library/tests/rabbitmq/test_rabbitmq_rpc.py b/packages/service-library/tests/rabbitmq/test_rabbitmq_rpc.py index 996e8e6dc4c..fc37076d56d 100644 --- a/packages/service-library/tests/rabbitmq/test_rabbitmq_rpc.py +++ b/packages/service-library/tests/rabbitmq/test_rabbitmq_rpc.py @@ -350,11 +350,12 @@ async def _a_handler() -> None: pass if expect_fail: - with pytest.raises(ValidationError) as exec_info: + with pytest.raises( + ValidationError, match=r"String should have at most \d+ characters" + ): await rpc_server.register_handler( RPCNamespace("a"), RPCMethodName(handler_name), _a_handler ) - assert "String should have at most 252 characters" in f"{exec_info.value}" else: await rpc_server.register_handler( RPCNamespace("a"), RPCMethodName(handler_name), _a_handler From 3f91e305acf4e44a144b6cfb17c5a41050e952e9 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 1 Oct 2024 10:13:00 +0200 Subject: [PATCH 152/280] remove deprecated --- .../dask_task_models_library/container_tasks/io.py | 2 +- .../tests/container_tasks/test_io.py | 14 +++++++------- .../src/pytest_simcore/docker_registry.py | 4 ++-- .../src/pytest_simcore/pydantic_models.py | 2 +- .../src/pytest_simcore/rabbit_service.py | 2 +- .../services_api_mocks_for_aiohttp_clients.py | 10 +++++----- .../src/service_integration/cli/__init__.py | 2 +- .../aiohttp/long_running_tasks/client.py | 4 ++-- .../src/servicelib/aiohttp/requests_validation.py | 2 +- .../deferred_tasks/_redis_task_tracker.py | 4 +++- .../fastapi/long_running_tasks/_context_manager.py | 2 +- .../src/servicelib/long_running_tasks/_task.py | 6 ++++-- .../tests/aiohttp/long_running_tasks/conftest.py | 2 +- .../long_running_tasks/test_long_running_tasks.py | 6 +++--- .../service-library/tests/fastapi/test_rabbitmq.py | 2 +- .../tests/test__models_examples.py | 2 +- .../node_ports_common/storage_client.py | 10 +++++----- packages/simcore-sdk/tests/integration/conftest.py | 2 +- .../tests/unit/test_node_data_data_manager.py | 2 +- 19 files changed, 42 insertions(+), 38 deletions(-) diff --git a/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/io.py b/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/io.py index 5f46f75d93b..0bb95130723 100644 --- 
a/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/io.py +++ b/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/io.py @@ -175,7 +175,7 @@ def from_task_output( msg = f"Could not locate '{output_key}' in {output_data_file}" raise ValueError(msg) - return cls.parse_obj(data) + return cls.model_validate(data) model_config = ConfigDict( json_schema_extra={ diff --git a/packages/dask-task-models-library/tests/container_tasks/test_io.py b/packages/dask-task-models-library/tests/container_tasks/test_io.py index d0b4cb196ed..db6357d930c 100644 --- a/packages/dask-task-models-library/tests/container_tasks/test_io.py +++ b/packages/dask-task-models-library/tests/container_tasks/test_io.py @@ -30,7 +30,7 @@ def test_io_models_examples(model_cls, model_cls_examples): for name, example in model_cls_examples.items(): print(name, ":", pformat(example)) - model_instance = model_cls.parse_obj(example) + model_instance = model_cls.model_validate(example) assert model_instance, f"Failed with {name}" print(name, ":", model_instance) @@ -73,7 +73,7 @@ def test_create_task_output_from_task_with_optional_fields_as_required( "examples" ]: - task_output_schema = TaskOutputDataSchema.parse_obj(schema_example) + task_output_schema = TaskOutputDataSchema.model_validate(schema_example) outputs_file_name = _create_fake_outputs( task_output_schema, tmp_path, optional_fields_set, faker ) @@ -94,7 +94,7 @@ def test_create_task_output_from_task_with_optional_fields_as_required( def test_create_task_output_from_task_throws_when_there_are_missing_files( tmp_path: Path, faker: Faker ): - task_output_schema = TaskOutputDataSchema.parse_obj( + task_output_schema = TaskOutputDataSchema.model_validate( { "required_file_output": { "required": True, @@ -115,7 +115,7 @@ def test_create_task_output_from_task_throws_when_there_are_missing_files( def test_create_task_output_from_task_does_not_throw_when_there_are_optional_missing_files( tmp_path: Path, faker: Faker ): - task_output_schema = TaskOutputDataSchema.parse_obj( + task_output_schema = TaskOutputDataSchema.model_validate( { "optional_file_output": { "required": False, @@ -136,7 +136,7 @@ def test_create_task_output_from_task_does_not_throw_when_there_are_optional_mis def test_create_task_output_from_task_throws_when_there_are_entries( tmp_path: Path, faker: Faker ): - task_output_schema = TaskOutputDataSchema.parse_obj( + task_output_schema = TaskOutputDataSchema.model_validate( { "some_output": { "required": True, @@ -155,7 +155,7 @@ def test_create_task_output_from_task_throws_when_there_are_entries( def test_create_task_output_from_task_does_not_throw_when_there_are_optional_entries( tmp_path: Path, faker: Faker ): - task_output_schema = TaskOutputDataSchema.parse_obj( + task_output_schema = TaskOutputDataSchema.model_validate( { "some_output": { "required": False, @@ -184,6 +184,6 @@ def test_objects_are_compatible_with_dask_requirements(model_cls, model_cls_exam for name, example in model_cls_examples.items(): print(name, ":", pformat(example)) - model_instance = model_cls.parse_obj(example) + model_instance = model_cls.model_validate(example) reloaded_instance = loads(dumps(model_instance)) assert reloaded_instance == model_instance diff --git a/packages/pytest-simcore/src/pytest_simcore/docker_registry.py b/packages/pytest-simcore/src/pytest_simcore/docker_registry.py index 5780937a2c0..91cd5e2d428 100644 --- a/packages/pytest-simcore/src/pytest_simcore/docker_registry.py +++ 
b/packages/pytest-simcore/src/pytest_simcore/docker_registry.py @@ -106,9 +106,9 @@ def external_registry_settings( if external_envfile_dict: config = { field: external_envfile_dict.get(field, None) - for field in RegistrySettings.__fields__ + for field in RegistrySettings.model_fields } - return RegistrySettings.parse_obj(config) + return RegistrySettings.model_validate(config) return None diff --git a/packages/pytest-simcore/src/pytest_simcore/pydantic_models.py b/packages/pytest-simcore/src/pytest_simcore/pydantic_models.py index 035e793ccbf..04d285a601e 100644 --- a/packages/pytest-simcore/src/pytest_simcore/pydantic_models.py +++ b/packages/pytest-simcore/src/pytest_simcore/pydantic_models.py @@ -64,7 +64,7 @@ def test_model_examples( model_cls: type[BaseModel], example_name: int, example_data: Any ): print(example_name, ":", json.dumps(example_data)) - assert model_cls.parse_obj(example_data) + assert model_cls.model_validate(example_data) """ def _is_model_cls(obj) -> bool: diff --git a/packages/pytest-simcore/src/pytest_simcore/rabbit_service.py b/packages/pytest-simcore/src/pytest_simcore/rabbit_service.py index 47188400e79..240e0100648 100644 --- a/packages/pytest-simcore/src/pytest_simcore/rabbit_service.py +++ b/packages/pytest-simcore/src/pytest_simcore/rabbit_service.py @@ -56,7 +56,7 @@ def rabbit_env_vars_dict( async def rabbit_settings(rabbit_env_vars_dict: EnvVarsDict) -> RabbitSettings: """Returns the settings of a rabbit service that is up and responsive""" - settings = RabbitSettings.parse_obj(rabbit_env_vars_dict) + settings = RabbitSettings.model_validate(rabbit_env_vars_dict) await wait_till_rabbit_responsive(settings.dsn) return settings diff --git a/packages/pytest-simcore/src/pytest_simcore/services_api_mocks_for_aiohttp_clients.py b/packages/pytest-simcore/src/pytest_simcore/services_api_mocks_for_aiohttp_clients.py index f87b2e3b749..429783e7061 100644 --- a/packages/pytest-simcore/src/pytest_simcore/services_api_mocks_for_aiohttp_clients.py +++ b/packages/pytest-simcore/src/pytest_simcore/services_api_mocks_for_aiohttp_clients.py @@ -107,7 +107,7 @@ def create_computation_cb(url, **kwargs) -> CallbackResult: "62237c33-8d6c-4709-aa92-c3cf693dd6d2", ], } - returned_computation = ComputationTask.parse_obj( + returned_computation = ComputationTask.model_validate( ComputationTask.model_config["json_schema_extra"]["examples"][0] ).copy( update={ @@ -131,7 +131,7 @@ def get_computation_cb(url, **kwargs) -> CallbackResult: state = RunningState.NOT_STARTED pipeline: dict[str, list[str]] = FULL_PROJECT_PIPELINE_ADJACENCY node_states = FULL_PROJECT_NODE_STATES - returned_computation = ComputationTask.parse_obj( + returned_computation = ComputationTask.model_validate( ComputationTask.model_config["json_schema_extra"]["examples"][0] ).copy( update={ @@ -169,7 +169,7 @@ def list_clusters_cb(url, **kwargs) -> CallbackResult: body=json.dumps( [ json.loads( - Cluster.parse_obj( + Cluster.model_validate( random.choice( Cluster.model_config["json_schema_extra"]["examples"] ) @@ -187,7 +187,7 @@ def get_cluster_cb(url, **kwargs) -> CallbackResult: return CallbackResult( status=200, payload=json.loads( - Cluster.parse_obj( + Cluster.model_validate( { **random.choice( Cluster.model_config["json_schema_extra"]["examples"] @@ -218,7 +218,7 @@ def patch_cluster_cb(url, **kwargs) -> CallbackResult: return CallbackResult( status=200, payload=json.loads( - Cluster.parse_obj( + Cluster.model_validate( { **random.choice( Cluster.model_config["json_schema_extra"]["examples"] diff --git 
a/packages/service-integration/src/service_integration/cli/__init__.py b/packages/service-integration/src/service_integration/cli/__init__.py index 7a1c058957e..a146de5735d 100644 --- a/packages/service-integration/src/service_integration/cli/__init__.py +++ b/packages/service-integration/src/service_integration/cli/__init__.py @@ -62,7 +62,7 @@ def main( overrides["COMPOSE_VERSION"] = compose_version # save states - ctx.settings = AppSettings.parse_obj(overrides) # type: ignore[attr-defined] # pylint:disable=no-member + ctx.settings = AppSettings.model_validate(overrides) # type: ignore[attr-defined] # pylint:disable=no-member # diff --git a/packages/service-library/src/servicelib/aiohttp/long_running_tasks/client.py b/packages/service-library/src/servicelib/aiohttp/long_running_tasks/client.py index c99cb1ce671..04071d5d07c 100644 --- a/packages/service-library/src/servicelib/aiohttp/long_running_tasks/client.py +++ b/packages/service-library/src/servicelib/aiohttp/long_running_tasks/client.py @@ -35,7 +35,7 @@ async def _start(session: ClientSession, url: URL, json: RequestBody | None) -> data, error = unwrap_envelope(await response.json()) assert not error # nosec assert data is not None # nosec - return TaskGet.parse_obj(data) + return TaskGet.model_validate(data) @retry(**_DEFAULT_AIOHTTP_RETRY_POLICY) @@ -57,7 +57,7 @@ async def _wait_for_completion( data, error = unwrap_envelope(await response.json()) assert not error # nosec assert data is not None # nosec - task_status = TaskStatus.parse_obj(data) + task_status = TaskStatus.model_validate(data) yield task_status.task_progress if not task_status.done: await asyncio.sleep( diff --git a/packages/service-library/src/servicelib/aiohttp/requests_validation.py b/packages/service-library/src/servicelib/aiohttp/requests_validation.py index 59d4b69ce30..b70f0e821e4 100644 --- a/packages/service-library/src/servicelib/aiohttp/requests_validation.py +++ b/packages/service-library/src/servicelib/aiohttp/requests_validation.py @@ -140,7 +140,7 @@ def parse_request_path_parameters_as( use_error_v1=use_enveloped_error_v1, ): data = dict(request.match_info) - return parameters_schema_cls.parse_obj(data) + return parameters_schema_cls.model_validate(data) def parse_request_query_parameters_as( diff --git a/packages/service-library/src/servicelib/deferred_tasks/_redis_task_tracker.py b/packages/service-library/src/servicelib/deferred_tasks/_redis_task_tracker.py index 718af8c526f..382cb6c9f04 100644 --- a/packages/service-library/src/servicelib/deferred_tasks/_redis_task_tracker.py +++ b/packages/service-library/src/servicelib/deferred_tasks/_redis_task_tracker.py @@ -43,7 +43,9 @@ async def get(self, task_uid: TaskUID) -> TaskScheduleModel | None: return await self._get_raw(_get_key(task_uid)) async def save(self, task_uid: TaskUID, task_schedule: TaskScheduleModel) -> None: - await self.redis_client_sdk.redis.set(_get_key(task_uid), task_schedule.json()) + await self.redis_client_sdk.redis.set( + _get_key(task_uid), task_schedule.model_dump_json() + ) async def remove(self, task_uid: TaskUID) -> None: await self.redis_client_sdk.redis.delete(_get_key(task_uid)) diff --git a/packages/service-library/src/servicelib/fastapi/long_running_tasks/_context_manager.py b/packages/service-library/src/servicelib/fastapi/long_running_tasks/_context_manager.py index 7cb61f29140..2c001525173 100644 --- a/packages/service-library/src/servicelib/fastapi/long_running_tasks/_context_manager.py +++ 
b/packages/service-library/src/servicelib/fastapi/long_running_tasks/_context_manager.py @@ -96,7 +96,7 @@ async def periodic_task_result( async def _status_update() -> TaskStatus: task_status: TaskStatus = await client.get_task_status(task_id) - logger.debug("Task status %s", task_status.json()) + logger.debug("Task status %s", task_status.model_dump_json()) await progress_manager.update( task_id=task_id, message=task_status.task_progress.message, diff --git a/packages/service-library/src/servicelib/long_running_tasks/_task.py b/packages/service-library/src/servicelib/long_running_tasks/_task.py index 88960cb6327..641e78a96a8 100644 --- a/packages/service-library/src/servicelib/long_running_tasks/_task.py +++ b/packages/service-library/src/servicelib/long_running_tasks/_task.py @@ -123,7 +123,9 @@ async def _stale_tasks_monitor_worker(self) -> None: logger.warning( "Removing stale task '%s' with status '%s'", task_id, - self.get_task_status(task_id, with_task_context=None).json(), + self.get_task_status( + task_id, with_task_context=None + ).model_dump_json(), ) await self.remove_task( task_id, with_task_context=None, reraise_errors=False @@ -210,7 +212,7 @@ def get_task_status( task = tracked_task.task done = task.done() - return TaskStatus.parse_obj( + return TaskStatus.model_validate( { "task_progress": tracked_task.task_progress, "done": done, diff --git a/packages/service-library/tests/aiohttp/long_running_tasks/conftest.py b/packages/service-library/tests/aiohttp/long_running_tasks/conftest.py index f4fcc9b318c..987a68a4036 100644 --- a/packages/service-library/tests/aiohttp/long_running_tasks/conftest.py +++ b/packages/service-library/tests/aiohttp/long_running_tasks/conftest.py @@ -122,7 +122,7 @@ async def _waiter( data, error = await assert_status(result, status.HTTP_200_OK) assert data assert not error - task_status = long_running_tasks.server.TaskStatus.parse_obj(data) + task_status = long_running_tasks.server.TaskStatus.model_validate(data) assert task_status assert task_status.done diff --git a/packages/service-library/tests/fastapi/long_running_tasks/test_long_running_tasks.py b/packages/service-library/tests/fastapi/long_running_tasks/test_long_running_tasks.py index 75605862287..52527f138d9 100644 --- a/packages/service-library/tests/fastapi/long_running_tasks/test_long_running_tasks.py +++ b/packages/service-library/tests/fastapi/long_running_tasks/test_long_running_tasks.py @@ -124,7 +124,7 @@ async def _waiter( with attempt: result = await client.get(f"{status_url}") assert result.status_code == status.HTTP_200_OK - task_status = long_running_tasks.server.TaskStatus.parse_obj( + task_status = long_running_tasks.server.TaskStatus.model_validate( result.json() ) assert task_status @@ -183,7 +183,7 @@ async def test_workflow( result = await client.get(f"{result_url}") # NOTE: this is DIFFERENT than with aiohttp where we return the real result assert result.status_code == status.HTTP_200_OK - task_result = long_running_tasks.server.TaskResult.parse_obj(result.json()) + task_result = long_running_tasks.server.TaskResult.model_validate(result.json()) assert not task_result.error assert task_result.result == [f"{x}" for x in range(10)] # getting the result again should raise a 404 @@ -222,7 +222,7 @@ async def test_failing_task_returns_error( result_url = app.url_path_for("get_task_result", task_id=task_id) result = await client.get(f"{result_url}") assert result.status_code == status.HTTP_200_OK - task_result = long_running_tasks.server.TaskResult.parse_obj(result.json()) + 
task_result = long_running_tasks.server.TaskResult.model_validate(result.json()) assert not task_result.result assert task_result.error diff --git a/packages/service-library/tests/fastapi/test_rabbitmq.py b/packages/service-library/tests/fastapi/test_rabbitmq.py index 9c94cfa0766..b41a94097f2 100644 --- a/packages/service-library/tests/fastapi/test_rabbitmq.py +++ b/packages/service-library/tests/fastapi/test_rabbitmq.py @@ -132,6 +132,6 @@ async def test_post_message( f"--> checking for message in rabbit exchange {rabbit_message.channel_name}, {attempt.retry_state.retry_object.statistics}" ) mocked_message_handler.assert_called_once_with( - rabbit_message.json().encode() + rabbit_message.model_dump_json().encode() ) print("... message received") diff --git a/packages/settings-library/tests/test__models_examples.py b/packages/settings-library/tests/test__models_examples.py index c60a6c08261..96ffc7135b2 100644 --- a/packages/settings-library/tests/test__models_examples.py +++ b/packages/settings-library/tests/test__models_examples.py @@ -14,6 +14,6 @@ def test_all_settings_library_models_config_examples( model_cls: type[BaseModel], example_name: int, example_data: Any ): - assert model_cls.parse_obj( + assert model_cls.model_validate( example_data ), f"Failed {example_name} : {json.dumps(example_data)}" diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/storage_client.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/storage_client.py index c249cbcf830..b7a394a6dbd 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/storage_client.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/storage_client.py @@ -144,7 +144,7 @@ async def get_storage_locations( expected_status=status.HTTP_200_OK, params={"user_id": f"{user_id}"}, ) as response: - locations_enveloped = Envelope[FileLocationArray].parse_obj( + locations_enveloped = Envelope[FileLocationArray].model_validate( await response.json() ) if locations_enveloped.data is None: @@ -173,7 +173,7 @@ async def get_download_file_link( expected_status=status.HTTP_200_OK, params={"user_id": f"{user_id}", "link_type": link_type.value}, ) as response: - presigned_link_enveloped = Envelope[PresignedLink].parse_obj( + presigned_link_enveloped = Envelope[PresignedLink].model_validate( await response.json() ) if not presigned_link_enveloped.data or not presigned_link_enveloped.data.link: @@ -215,7 +215,7 @@ async def get_upload_file_links( expected_status=status.HTTP_200_OK, params=query_params, ) as response: - file_upload_links_enveloped = Envelope[FileUploadSchema].parse_obj( + file_upload_links_enveloped = Envelope[FileUploadSchema].model_validate( await response.json() ) if file_upload_links_enveloped.data is None: @@ -245,7 +245,7 @@ async def get_file_metadata( # NOTE: keeps backwards compatibility raise exceptions.S3InvalidPathError(file_id) - file_metadata_enveloped = Envelope[FileMetaDataGet].parse_obj(payload) + file_metadata_enveloped = Envelope[FileMetaDataGet].model_validate(payload) assert file_metadata_enveloped.data # nosec return file_metadata_enveloped.data @@ -265,7 +265,7 @@ async def list_file_metadata( expected_status=status.HTTP_200_OK, params={"user_id": f"{user_id}", "uuid_filter": uuid_filter}, ) as resp: - envelope = Envelope[list[FileMetaDataGet]].parse_obj(await resp.json()) + envelope = Envelope[list[FileMetaDataGet]].model_validate(await resp.json()) assert envelope.data is not None # nosec file_meta_data: list[FileMetaDataGet] = envelope.data return file_meta_data diff 
--git a/packages/simcore-sdk/tests/integration/conftest.py b/packages/simcore-sdk/tests/integration/conftest.py index d5f6cd7227a..92b6afaa81b 100644 --- a/packages/simcore-sdk/tests/integration/conftest.py +++ b/packages/simcore-sdk/tests/integration/conftest.py @@ -142,7 +142,7 @@ async def _create(file_path: Path) -> dict[str, Any]: async with ClientSession() as session: async with session.put(url) as resp: resp.raise_for_status() - presigned_links_enveloped = Envelope[FileUploadSchema].parse_obj( + presigned_links_enveloped = Envelope[FileUploadSchema].model_validate( await resp.json() ) assert presigned_links_enveloped.data diff --git a/packages/simcore-sdk/tests/unit/test_node_data_data_manager.py b/packages/simcore-sdk/tests/unit/test_node_data_data_manager.py index c1edb4f183c..a578d410605 100644 --- a/packages/simcore-sdk/tests/unit/test_node_data_data_manager.py +++ b/packages/simcore-sdk/tests/unit/test_node_data_data_manager.py @@ -40,7 +40,7 @@ def _create_files(number: int, folder: Path) -> list[Path]: @pytest.fixture def r_clone_settings(faker: Faker) -> RCloneSettings: - return RCloneSettings.parse_obj( + return RCloneSettings.model_validate( { "R_CLONE_S3": { "S3_ENDPOINT": faker.url(), From 9ebf506eed9b10a559b2d0af7a4e3e9bd1257bd6 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 1 Oct 2024 12:56:18 +0200 Subject: [PATCH 153/280] replace deprecated dict --- .../src/simcore_postgres_database/utils_projects_nodes.py | 2 +- .../postgres-database/tests/test_utils_projects_nodes.py | 4 ++-- .../pytest-simcore/src/pytest_simcore/minio_service.py | 2 +- .../src/service_integration/cli/_compose_spec.py | 2 +- .../service-library/tests/aiohttp/test_docker_utils.py | 4 ++-- .../tests/aiohttp/test_requests_validation.py | 8 ++++---- .../service-library/tests/fastapi/test_docker_utils.py | 4 ++-- .../src/simcore_sdk/node_ports_v2/serialization_v2.py | 2 +- .../tests/integration/test_node_ports_v2_nodeports2.py | 2 +- .../tests/unit/test_node_ports_v2_port_validation.py | 2 +- 10 files changed, 16 insertions(+), 16 deletions(-) diff --git a/packages/postgres-database/src/simcore_postgres_database/utils_projects_nodes.py b/packages/postgres-database/src/simcore_postgres_database/utils_projects_nodes.py index ba1028b3bc4..413407f254f 100644 --- a/packages/postgres-database/src/simcore_postgres_database/utils_projects_nodes.py +++ b/packages/postgres-database/src/simcore_postgres_database/utils_projects_nodes.py @@ -83,7 +83,7 @@ async def add( [ { "project_uuid": f"{self.project_uuid}", - **node.dict(), + **node.model_dump(), } for node in nodes ] diff --git a/packages/postgres-database/tests/test_utils_projects_nodes.py b/packages/postgres-database/tests/test_utils_projects_nodes.py index a20083608dd..50d2af96911 100644 --- a/packages/postgres-database/tests/test_utils_projects_nodes.py +++ b/packages/postgres-database/tests/test_utils_projects_nodes.py @@ -412,9 +412,9 @@ async def test_get_project_id_from_node_id_raises_if_multiple_projects_with_same assert len(project1_nodes) == 1 project2_nodes = await project2_repo.add(connection, nodes=[shared_node]) assert len(project2_nodes) == 1 - assert project1_nodes[0].dict( + assert project1_nodes[0].model_dump( include=ProjectNodeCreate.get_field_names(exclude={"created", "modified"}) - ) == project2_nodes[0].dict( + ) == project2_nodes[0].model_dump( include=ProjectNodeCreate.get_field_names(exclude={"created", "modified"}) ) with pytest.raises(ProjectNodesNonUniqueNodeFoundError): diff --git 
a/packages/pytest-simcore/src/pytest_simcore/minio_service.py b/packages/pytest-simcore/src/pytest_simcore/minio_service.py index 46cee6fbeeb..38b9d2bdf8d 100644 --- a/packages/pytest-simcore/src/pytest_simcore/minio_service.py +++ b/packages/pytest-simcore/src/pytest_simcore/minio_service.py @@ -31,5 +31,5 @@ def minio_s3_settings_envs( minio_s3_settings: S3Settings, monkeypatch: pytest.MonkeyPatch, ) -> EnvVarsDict: - changed_envs: EnvVarsDict = minio_s3_settings.dict(exclude_unset=True) + changed_envs: EnvVarsDict = minio_s3_settings.model_dump(exclude_unset=True) return setenvs_from_dict(monkeypatch, changed_envs) diff --git a/packages/service-integration/src/service_integration/cli/_compose_spec.py b/packages/service-integration/src/service_integration/cli/_compose_spec.py index 117a4afa5ef..f6d9b16be9e 100644 --- a/packages/service-integration/src/service_integration/cli/_compose_spec.py +++ b/packages/service-integration/src/service_integration/cli/_compose_spec.py @@ -196,7 +196,7 @@ def create_compose( for n, config_name in enumerate(configs_kwargs_map): nth_compose_spec = create_docker_compose_image_spec( settings, **configs_kwargs_map[config_name] - ).dict(exclude_unset=True) + ).model_dump(exclude_unset=True) if n == 0: compose_spec_dict = nth_compose_spec diff --git a/packages/service-library/tests/aiohttp/test_docker_utils.py b/packages/service-library/tests/aiohttp/test_docker_utils.py index 4b5c9747c7f..bcd2129abd2 100644 --- a/packages/service-library/tests/aiohttp/test_docker_utils.py +++ b/packages/service-library/tests/aiohttp/test_docker_utils.py @@ -96,13 +96,13 @@ def _assert_progress_report_values( # check first progress assert mocked_progress_cb.call_args_list[0].args[0].dict( exclude={"message"} - ) == ProgressReport(actual_value=0, total=total, unit="Byte").dict( + ) == ProgressReport(actual_value=0, total=total, unit="Byte").model_dump( exclude={"message"} ) # check last progress assert mocked_progress_cb.call_args_list[-1].args[0].dict( exclude={"message"} - ) == ProgressReport(actual_value=total, total=total, unit="Byte").dict( + ) == ProgressReport(actual_value=total, total=total, unit="Byte").model_dump( exclude={"message"} ) diff --git a/packages/service-library/tests/aiohttp/test_requests_validation.py b/packages/service-library/tests/aiohttp/test_requests_validation.py index a5f4d875946..4e1b4f4e2e7 100644 --- a/packages/service-library/tests/aiohttp/test_requests_validation.py +++ b/packages/service-library/tests/aiohttp/test_requests_validation.py @@ -55,7 +55,7 @@ class MyRequestQueryParams(BaseModel): label: str def as_params(self, **kwargs) -> dict[str, str]: - data = self.dict(**kwargs) + data = self.model_dump(**kwargs) return {k: f"{v}" for k, v in data.items()} @classmethod @@ -324,7 +324,7 @@ async def test_parse_request_with_invalid_json_body( f"/projects/{path_params.project_uuid}", params=query_params.as_params(), data=b"[ 1 2, 3 'broken-json' ]", - headers=headers_params.dict(by_alias=True), + headers=headers_params.model_dump(by_alias=True), ) body = await r.text() @@ -342,8 +342,8 @@ async def test_parse_request_with_invalid_headers_params( r = await client.get( f"/projects/{path_params.project_uuid}", params=query_params.as_params(), - json=body.dict(), - headers=headers_params.dict(), # we pass the wrong names + json=body.model_dump(), + headers=headers_params.model_dump(), # we pass the wrong names ) assert r.status == status.HTTP_422_UNPROCESSABLE_ENTITY, f"{await r.text()}" diff --git 
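The serialization helpers are renamed the same way: .dict() becomes .model_dump() and .json() becomes .model_dump_json(), with keyword arguments such as by_alias, exclude and exclude_unset unchanged. A hedged sketch with an illustrative model (not part of this diff):

    from pydantic import BaseModel

    class ExampleReport(BaseModel):  # hypothetical stand-in
        actual_value: float
        total: float
        message: str | None = None

    report = ExampleReport(actual_value=0, total=10)
    report.model_dump(exclude={"message"})   # v1: report.dict(exclude={"message"})
    report.model_dump(exclude_unset=True)    # v1: report.dict(exclude_unset=True)
    report.model_dump_json()                 # v1: report.json()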
a/packages/service-library/tests/fastapi/test_docker_utils.py b/packages/service-library/tests/fastapi/test_docker_utils.py index ded21eb0f8b..f6d78066c97 100644 --- a/packages/service-library/tests/fastapi/test_docker_utils.py +++ b/packages/service-library/tests/fastapi/test_docker_utils.py @@ -102,13 +102,13 @@ def _assert_progress_report_values( # check first progress assert mocked_progress_cb.call_args_list[0].args[0].dict( exclude={"message"} - ) == ProgressReport(actual_value=0, total=total, unit="Byte").dict( + ) == ProgressReport(actual_value=0, total=total, unit="Byte").model_dump( exclude={"message"} ) # check last progress assert mocked_progress_cb.call_args_list[-1].args[0].dict( exclude={"message"} - ) == ProgressReport(actual_value=total, total=total, unit="Byte").dict( + ) == ProgressReport(actual_value=total, total=total, unit="Byte").model_dump( exclude={"message"} ) diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/serialization_v2.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/serialization_v2.py index daa4c9aaa3e..f4d74711e18 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/serialization_v2.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/serialization_v2.py @@ -115,7 +115,7 @@ async def dump(nodeports: Nodeports) -> None: "dumping node_ports_v2 object %s", pformat(nodeports, indent=2), ) - _nodeports_cfg = nodeports.dict( + _nodeports_cfg = nodeports.model_dump( include={"internal_inputs", "internal_outputs"}, by_alias=True, exclude_unset=True, diff --git a/packages/simcore-sdk/tests/integration/test_node_ports_v2_nodeports2.py b/packages/simcore-sdk/tests/integration/test_node_ports_v2_nodeports2.py index 757dfb82a6d..ec24f271394 100644 --- a/packages/simcore-sdk/tests/integration/test_node_ports_v2_nodeports2.py +++ b/packages/simcore-sdk/tests/integration/test_node_ports_v2_nodeports2.py @@ -91,7 +91,7 @@ async def _check_port_valid( assert port.value assert isinstance(port.value, DownloadLink | PortLink | BaseFileLink) assert ( - port.value.dict(by_alias=True, exclude_unset=True) + port.value.model_dump(by_alias=True, exclude_unset=True) == port_values[key_name] ) else: diff --git a/packages/simcore-sdk/tests/unit/test_node_ports_v2_port_validation.py b/packages/simcore-sdk/tests/unit/test_node_ports_v2_port_validation.py index bfaa762c3a8..d636bb9c3f6 100644 --- a/packages/simcore-sdk/tests/unit/test_node_ports_v2_port_validation.py +++ b/packages/simcore-sdk/tests/unit/test_node_ports_v2_port_validation.py @@ -143,7 +143,7 @@ class A(BaseModel): "contentSchema": content_schema, } sample = [{"i": 5, "s": "x", "l": [1, 2]}, {"i": 6, "s": "y", "l": [2]}] - expected_value = [A(**i).dict() for i in sample] + expected_value = [A(**i).model_dump() for i in sample] print(json.dumps(port_meta, indent=1)) print(json.dumps(expected_value, indent=1)) From acc5c7fc5441acbf0ce141f9eadf5965064005ba Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 1 Oct 2024 13:22:21 +0200 Subject: [PATCH 154/280] fix validation info --- .../computational_sidecar/models.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/models.py b/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/models.py index 24c5028b147..1ce89bf1b68 100644 --- a/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/models.py +++ 
b/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/models.py @@ -3,7 +3,7 @@ from models_library.basic_regex import SIMPLE_VERSION_RE from models_library.services import ServiceMetaDataPublished from packaging import version -from pydantic import BaseModel, ByteSize, ConfigDict, Field, field_validator +from pydantic import BaseModel, ByteSize, ConfigDict, Field, ValidationInfo, field_validator LEGACY_INTEGRATION_VERSION = version.Version("0") PROGRESS_REGEXP: re.Pattern[str] = re.compile( @@ -44,16 +44,16 @@ class ContainerHostConfig(BaseModel): @field_validator("memory_swap", mode="before") @classmethod - def ensure_no_memory_swap_means_no_swap(cls, v, values): + def ensure_no_memory_swap_means_no_swap(cls, v, info: ValidationInfo): if v is None: # if not set it will be the same value as memory to ensure swap is disabled - return values["memory"] + return info.data["memory"] return v @field_validator("memory_swap") @classmethod - def ensure_memory_swap_cannot_be_unlimited_nor_smaller_than_memory(cls, v, values): - if v < values["memory"]: + def ensure_memory_swap_cannot_be_unlimited_nor_smaller_than_memory(cls, v, info: ValidationInfo): + if v < info.data["memory"]: msg = "Memory swap cannot be set to a smaller value than memory" raise ValueError(msg) return v From 9d0bb3e275906e674d0edcc8f7a3d673f587fa37 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 1 Oct 2024 13:26:39 +0200 Subject: [PATCH 155/280] upgrade requirements --- .../dynamic-scheduler/requirements/_base.txt | 44 ++++++++++--------- .../dynamic-scheduler/requirements/_test.txt | 13 ------ .../dynamic-scheduler/requirements/_tools.txt | 11 ----- 3 files changed, 24 insertions(+), 44 deletions(-) diff --git a/services/dynamic-scheduler/requirements/_base.txt b/services/dynamic-scheduler/requirements/_base.txt index 6a29f3ea11a..f73100bd9dc 100644 --- a/services/dynamic-scheduler/requirements/_base.txt +++ b/services/dynamic-scheduler/requirements/_base.txt @@ -22,6 +22,8 @@ aiosignal==1.3.1 # via aiohttp alembic==1.13.1 # via -r requirements/../../../packages/postgres-database/requirements/_base.in +annotated-types==0.7.0 + # via pydantic anyio==4.3.0 # via # fast-depends @@ -35,10 +37,7 @@ arrow==1.3.0 # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in async-timeout==4.0.3 - # via - # aiohttp - # asyncpg - # redis + # via asyncpg asyncpg==0.29.0 # via sqlalchemy attrs==23.2.0 @@ -65,19 +64,10 @@ dnspython==2.6.1 # via email-validator email-validator==2.1.1 # via pydantic -exceptiongroup==1.2.0 - # via anyio fast-depends==2.4.2 # via faststream -fastapi==0.99.1 +fastapi==0.115.0 # via - # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c 
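The validator rewrite shown above for ContainerHostConfig follows the standard v2 pattern: @validator becomes @field_validator, and the old values dict of previously validated fields is exposed as info.data on a ValidationInfo argument. A simplified sketch (stand-in model, not the real one):

    from pydantic import BaseModel, ValidationInfo, field_validator

    class HostConfigSketch(BaseModel):  # illustrative stand-in
        memory: int
        memory_swap: int | None = None

        @field_validator("memory_swap", mode="before")
        @classmethod
        def _default_swap_to_memory(cls, v, info: ValidationInfo):
            # v1 validators received `values: dict`; in v2 the already-validated
            # fields (here: memory) are available as info.data
            return info.data["memory"] if v is None else v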
requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in # prometheus-fastapi-instrumentator @@ -163,13 +153,12 @@ prometheus-fastapi-instrumentator==6.1.0 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in psycopg2-binary==2.9.9 # via sqlalchemy -pydantic==1.10.15 +pydantic==2.9.2 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt @@ -181,6 +170,20 @@ pydantic==1.10.15 # -r requirements/../../../packages/settings-library/requirements/_base.in # fast-depends # fastapi + # pydantic-extra-types + # pydantic-settings +pydantic-core==2.23.4 + # via pydantic +pydantic-extra-types==2.9.0 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in +pydantic-settings==2.5.2 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in pygments==2.17.2 # via rich pyinstrument==4.6.2 @@ -188,7 +191,9 @@ pyinstrument==4.6.2 python-dateutil==2.9.0.post0 # via arrow python-dotenv==1.0.1 - # via uvicorn + # via + # pydantic-settings + # uvicorn pyyaml==6.0.1 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -243,7 +248,7 @@ sqlalchemy==1.4.52 # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/postgres-database/requirements/_base.in # alembic -starlette==0.27.0 +starlette==0.38.6 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt @@ -272,12 +277,11 @@ typing-extensions==4.10.0 # aiodebug # aiodocker # alembic - # anyio # fastapi # faststream # pydantic + # pydantic-core # typer - # uvicorn uvicorn==0.29.0 # via # -r requirements/../../../packages/service-library/requirements/_fastapi.in diff --git a/services/dynamic-scheduler/requirements/_test.txt b/services/dynamic-scheduler/requirements/_test.txt index c8ed470df58..b26c6841581 100644 --- a/services/dynamic-scheduler/requirements/_test.txt +++ b/services/dynamic-scheduler/requirements/_test.txt @@ -19,11 +19,6 @@ coverage==7.6.1 # pytest-cov docker==7.1.0 
# via -r requirements/_test.in -exceptiongroup==1.2.0 - # via - # -c requirements/_base.txt - # anyio - # pytest faker==27.0.0 # via -r requirements/_test.in h11==0.14.0 @@ -104,14 +99,6 @@ sniffio==1.3.1 # httpx termcolor==2.4.0 # via pytest-sugar -tomli==2.0.1 - # via - # coverage - # pytest -typing-extensions==4.10.0 - # via - # -c requirements/_base.txt - # anyio urllib3==2.2.2 # via # -c requirements/../../../requirements/constraints.txt diff --git a/services/dynamic-scheduler/requirements/_tools.txt b/services/dynamic-scheduler/requirements/_tools.txt index c724e2ead52..f02a5246a65 100644 --- a/services/dynamic-scheduler/requirements/_tools.txt +++ b/services/dynamic-scheduler/requirements/_tools.txt @@ -69,22 +69,11 @@ ruff==0.6.1 # via -r requirements/../../../requirements/devenv.txt setuptools==73.0.1 # via pip-tools -tomli==2.0.1 - # via - # -c requirements/_test.txt - # black - # build - # mypy - # pip-tools - # pylint tomlkit==0.13.2 # via pylint typing-extensions==4.10.0 # via # -c requirements/_base.txt - # -c requirements/_test.txt - # astroid - # black # mypy virtualenv==20.26.3 # via pre-commit From 0c12d9eef87715543049398b994adf067b25ace7 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 1 Oct 2024 13:28:48 +0200 Subject: [PATCH 156/280] upgrade requirements --- .../dynamic-sidecar/requirements/_base.txt | 52 ++++++++++--------- .../dynamic-sidecar/requirements/_test.txt | 17 ++---- .../dynamic-sidecar/requirements/_tools.txt | 10 ---- 3 files changed, 31 insertions(+), 48 deletions(-) diff --git a/services/dynamic-sidecar/requirements/_base.txt b/services/dynamic-sidecar/requirements/_base.txt index 0416a0dc9f0..0ce44647c36 100644 --- a/services/dynamic-sidecar/requirements/_base.txt +++ b/services/dynamic-sidecar/requirements/_base.txt @@ -50,6 +50,8 @@ alembic==1.13.1 # via # -r requirements/../../../packages/postgres-database/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in +annotated-types==0.7.0 + # via pydantic anyio==4.3.0 # via # fast-depends @@ -67,10 +69,8 @@ arrow==1.3.0 # -r requirements/_base.in async-timeout==4.0.3 # via - # aiohttp # aiopg # asyncpg - # redis asyncpg==0.29.0 # via sqlalchemy attrs==23.2.0 @@ -106,26 +106,10 @@ dnspython==2.6.1 # via email-validator email-validator==2.1.1 # via pydantic -exceptiongroup==1.2.0 - # via anyio fast-depends==2.4.2 # via faststream -fastapi==0.99.1 +fastapi==0.115.0 # via - # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt - # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt - # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in # prometheus-fastapi-instrumentator @@ -243,13 +227,12 @@ psycopg2-binary==2.9.9 # via # aiopg # sqlalchemy -pydantic==1.10.15 +pydantic==2.9.2 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -276,6 +259,26 @@ pydantic==1.10.15 # -r requirements/_base.in # fast-depends # fastapi + # pydantic-extra-types + # pydantic-settings +pydantic-core==2.23.4 + # via pydantic +pydantic-extra-types==2.9.0 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in +pydantic-settings==2.5.2 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/_base.in pygments==2.17.2 # via rich pyinstrument==4.6.2 @@ -284,6 +287,8 @@ pyinstrument==4.6.2 # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in python-dateutil==2.9.0.post0 # via arrow +python-dotenv==1.0.1 + # via pydantic-settings python-engineio==4.9.0 # via python-socketio python-magic==0.4.27 @@ -374,7 +379,7 @@ sqlalchemy==1.4.52 # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in # aiopg # alembic -starlette==0.27.0 +starlette==0.38.6 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt @@ -419,13 +424,12 @@ typing-extensions==4.11.0 # aiodebug # aiodocker # alembic - # anyio # fastapi # faststream # pint # pydantic + # pydantic-core # typer - # uvicorn u-msgpack-python==2.8.0 # via -r requirements/_base.in uvicorn==0.29.0 diff --git a/services/dynamic-sidecar/requirements/_test.txt b/services/dynamic-sidecar/requirements/_test.txt index ba822234e96..b1e80961551 100644 --- a/services/dynamic-sidecar/requirements/_test.txt +++ b/services/dynamic-sidecar/requirements/_test.txt @@ -21,10 +21,6 @@ asgi-lifespan==2.1.0 # via -r requirements/_test.in async-asgi-testclient==1.4.11 # via -r requirements/_test.in -async-timeout==4.0.3 - # via - # -c requirements/_base.txt - # aiohttp attrs==23.2.0 # via # -c requirements/_base.txt @@ -47,10 +43,6 @@ coverage==7.6.1 # via pytest-cov docker==7.1.0 # via -r requirements/_test.in -exceptiongroup==1.2.0 - # via - # -c requirements/_base.txt - # pytest faker==27.0.0 # via -r requirements/_test.in flaky==3.8.1 @@ -111,7 +103,9 @@ python-dateutil==2.9.0.post0 # botocore # faker python-dotenv==1.0.1 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in requests==2.32.3 # via # async-asgi-testclient @@ -133,11 +127,6 @@ sqlalchemy==1.4.52 # -r requirements/_test.in sqlalchemy2-stubs==0.0.2a38 # via sqlalchemy -tomli==2.0.1 - # via - # coverage - # mypy - # pytest types-aiobotocore-s3==2.13.2 # via -r requirements/_test.in types-aiofiles==24.1.0.20240626 diff --git a/services/dynamic-sidecar/requirements/_tools.txt b/services/dynamic-sidecar/requirements/_tools.txt index 32ff7865877..6ba5bd3db49 100644 --- a/services/dynamic-sidecar/requirements/_tools.txt +++ b/services/dynamic-sidecar/requirements/_tools.txt @@ -72,22 +72,12 @@ ruff==0.6.1 # via -r requirements/../../../requirements/devenv.txt setuptools==73.0.1 # via pip-tools -tomli==2.0.1 - # via - # -c requirements/_test.txt - # black - # build - # mypy - # pip-tools - # pylint tomlkit==0.13.2 # via pylint typing-extensions==4.11.0 # via # -c requirements/_base.txt # -c requirements/_test.txt - # astroid - # black # mypy virtualenv==20.26.3 # via pre-commit From 3f990c541bf691c68bfbebb2c0006f8603ce1f7f Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 1 Oct 2024 13:53:08 +0200 Subject: [PATCH 157/280] continue upgrading --- .../src/simcore_service_dynamic_sidecar/api/containers.py | 4 ++-- .../tests/unit/test_modules_system_monitor__notifier.py | 6 +++--- 2 files changed, 5 
insertions(+), 5 deletions(-) diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/containers.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/containers.py index 73c772cc44f..71ec22cec91 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/containers.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/containers.py @@ -12,7 +12,7 @@ ActivityInfo, ActivityInfoOrNone, ) -from pydantic import parse_raw_as +from pydantic import TypeAdapter from servicelib.fastapi.requests_decorators import cancel_on_disconnect from ..core.docker_utils import docker_client @@ -174,7 +174,7 @@ async def get_containers_activity( return ActivityInfo(seconds_inactive=_INACTIVE_FOR_LONG_TIME) try: - return parse_raw_as(ActivityInfo, inactivity_response) + return TypeAdapter(ActivityInfo).validate_json(inactivity_response) except json.JSONDecodeError: _logger.warning( "Could not parse command result '%s' as '%s'", diff --git a/services/dynamic-sidecar/tests/unit/test_modules_system_monitor__notifier.py b/services/dynamic-sidecar/tests/unit/test_modules_system_monitor__notifier.py index 73184a1b3cb..30d410f493f 100644 --- a/services/dynamic-sidecar/tests/unit/test_modules_system_monitor__notifier.py +++ b/services/dynamic-sidecar/tests/unit/test_modules_system_monitor__notifier.py @@ -21,7 +21,7 @@ from models_library.api_schemas_webserver.socketio import SocketIORoomStr from models_library.projects_nodes_io import NodeID from models_library.users import UserID -from pydantic import ByteSize, NonNegativeInt, parse_obj_as +from pydantic import ByteSize, NonNegativeInt, TypeAdapter from pytest_mock import MockerFixture from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict from servicelib.utils import logged_gather @@ -112,7 +112,7 @@ def _get_on_service_disk_usage_event( # emulates front-end receiving message async def on_service_status(data): - assert parse_obj_as(ServiceDiskUsage, data) is not None + assert TypeAdapter(ServiceDiskUsage).validate_python(data) is not None on_event_spy = AsyncMock(wraps=on_service_status) socketio_client.on(SOCKET_IO_SERVICE_DISK_USAGE_EVENT, on_event_spy) @@ -132,7 +132,7 @@ def _get_mocked_disk_usage(byte_size_str: str) -> DiskUsage: return DiskUsage( total=ByteSize(0), used=ByteSize(0), - free=ByteSize.validate(byte_size_str), + free=TypeAdapter(ByteSize).validate_python(byte_size_str), used_percent=0, ) From 94c33aab99939190c94e43404b88c6ab83c2c660 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 1 Oct 2024 14:23:27 +0200 Subject: [PATCH 158/280] fix field --- packages/settings-library/src/settings_library/postgres.py | 2 +- packages/settings-library/src/settings_library/prometheus.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/settings-library/src/settings_library/postgres.py b/packages/settings-library/src/settings_library/postgres.py index d58fc462d98..e224b1d71f8 100644 --- a/packages/settings-library/src/settings_library/postgres.py +++ b/packages/settings-library/src/settings_library/postgres.py @@ -18,7 +18,7 @@ class PostgresSettings(BaseCustomSettings): # entrypoint POSTGRES_HOST: str - POSTGRES_PORT: PortInt = PortInt(5432) + POSTGRES_PORT: PortInt = 5432 # auth POSTGRES_USER: str diff --git a/packages/settings-library/src/settings_library/prometheus.py b/packages/settings-library/src/settings_library/prometheus.py index bee0399baf2..9c40293d463 100644 --- 
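The parse_raw_as / parse_obj_as helpers and ConstrainedInt-style classmethods such as ByteSize.validate are gone in v2; the hunks above route them through TypeAdapter instead, and defaults like POSTGRES_PORT or PROMETHEUS_VTAG become plain literals (assuming PortInt and VersionTag turn into Annotated aliases in v2, which can no longer be instantiated directly). A short sketch of the assumed equivalences, with a made-up model:

    from pydantic import BaseModel, ByteSize, TypeAdapter

    class ActivitySketch(BaseModel):  # illustrative stand-in for ActivityInfo
        seconds_inactive: float

    TypeAdapter(ActivitySketch).validate_json('{"seconds_inactive": 5}')   # v1: parse_raw_as(ActivitySketch, ...)
    TypeAdapter(ActivitySketch).validate_python({"seconds_inactive": 5})   # v1: parse_obj_as(ActivitySketch, ...)
    TypeAdapter(ByteSize).validate_python("5MiB")                          # v1: ByteSize.validate("5MiB")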
a/packages/settings-library/src/settings_library/prometheus.py +++ b/packages/settings-library/src/settings_library/prometheus.py @@ -9,7 +9,7 @@ class PrometheusSettings(BaseCustomSettings, MixinServiceSettings): PROMETHEUS_URL: AnyUrl - PROMETHEUS_VTAG: VersionTag = VersionTag("v1") + PROMETHEUS_VTAG: VersionTag = "v1" PROMETHEUS_USERNAME: str | None = None PROMETHEUS_PASSWORD: SecretStr | None = None From 7b5962a1c3549a6cdd8084121eb37019a45a0027 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 1 Oct 2024 15:30:42 +0200 Subject: [PATCH 159/280] fix error base class --- .../rabbitmq/rpc_interfaces/resource_usage_tracker/errors.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/errors.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/errors.py index 44549841802..0307dc8c29a 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/errors.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/errors.py @@ -1,7 +1,7 @@ -from pydantic.errors import PydanticErrorMixin +from models_library.errors_classes import OsparcErrorMixin -class ResourceUsageTrackerRuntimeError(PydanticErrorMixin, RuntimeError): +class ResourceUsageTrackerRuntimeError(OsparcErrorMixin, RuntimeError): msg_template: str = "Resource-usage-tracker unexpected error" From 8bbd32f1f7df50487a68d4eb78a0a3d58d8b69ea Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 1 Oct 2024 15:32:40 +0200 Subject: [PATCH 160/280] replace deprecated method --- .../src/simcore_service_efs_guardian/core/application.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/core/application.py b/services/efs-guardian/src/simcore_service_efs_guardian/core/application.py index 88c20f25ea3..e93b071761c 100644 --- a/services/efs-guardian/src/simcore_service_efs_guardian/core/application.py +++ b/services/efs-guardian/src/simcore_service_efs_guardian/core/application.py @@ -20,7 +20,7 @@ def create_app(settings: ApplicationSettings) -> FastAPI: - logger.info("app settings: %s", settings.json(indent=1)) + logger.info("app settings: %s", settings.model_dump_json(indent=1)) app = FastAPI( debug=settings.EFS_GUARDIAN_DEBUG, From 9916dc1b60ea5ba9ad58c41b20f6b5592714dcff Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 1 Oct 2024 15:43:08 +0200 Subject: [PATCH 161/280] upgrade requirements --- services/autoscaling/requirements/_base.txt | 53 +++++++++++--------- services/autoscaling/requirements/_test.txt | 30 +++++------ services/autoscaling/requirements/_tools.txt | 10 ---- 3 files changed, 42 insertions(+), 51 deletions(-) diff --git a/services/autoscaling/requirements/_base.txt b/services/autoscaling/requirements/_base.txt index 0716ffccf41..ba0fe4475e8 100644 --- a/services/autoscaling/requirements/_base.txt +++ b/services/autoscaling/requirements/_base.txt @@ -46,6 +46,8 @@ aiormq==6.8.0 # via aio-pika aiosignal==1.3.1 # via aiohttp +annotated-types==0.7.0 + # via pydantic anyio==4.3.0 # via # fast-depends @@ -57,13 +59,10 @@ arrow==1.3.0 # -r requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r 
requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -async-timeout==4.0.3 - # via - # aiohttp - # redis attrs==23.2.0 # via # aiohttp @@ -119,24 +118,10 @@ dnspython==2.6.1 # via email-validator email-validator==2.1.1 # via pydantic -exceptiongroup==1.2.1 - # via anyio fast-depends==2.4.2 # via faststream -fastapi==0.99.1 +fastapi==0.115.0 # via - # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in # prometheus-fastapi-instrumentator @@ -272,7 +257,7 @@ psutil==5.9.8 # via # -c requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt # distributed -pydantic==1.10.15 +pydantic==2.9.2 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -283,7 +268,6 @@ pydantic==1.10.15 # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c 
requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt @@ -300,6 +284,26 @@ pydantic==1.10.15 # -r requirements/../../../packages/settings-library/requirements/_base.in # fast-depends # fastapi + # pydantic-extra-types + # pydantic-settings +pydantic-core==2.23.4 + # via pydantic +pydantic-extra-types==2.9.0 + # via + # -r requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in +pydantic-settings==2.5.2 + # via + # -r requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in pygments==2.18.0 # via rich pyinstrument==4.6.2 @@ -310,6 +314,8 @@ python-dateutil==2.9.0.post0 # via # arrow # botocore +python-dotenv==1.0.1 + # via pydantic-settings pyyaml==6.0.1 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -378,7 +384,7 @@ sortedcontainers==2.4.0 # via # -c requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt # distributed -starlette==0.27.0 +starlette==0.38.6 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -440,16 +446,15 @@ typing-extensions==4.11.0 # via # aiodebug # aiodocker - # anyio # fastapi # faststream # pydantic + # pydantic-core # typer # types-aiobotocore # types-aiobotocore-ec2 # types-aiobotocore-s3 # types-aiobotocore-ssm - # uvicorn urllib3==2.2.1 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt diff --git a/services/autoscaling/requirements/_test.txt b/services/autoscaling/requirements/_test.txt index dca31f29f75..01c1f0b275e 100644 --- 
a/services/autoscaling/requirements/_test.txt +++ b/services/autoscaling/requirements/_test.txt @@ -1,3 +1,7 @@ +annotated-types==0.7.0 + # via + # -c requirements/_base.txt + # pydantic antlr4-python3-runtime==4.13.2 # via moto anyio==4.3.0 @@ -6,10 +10,6 @@ anyio==4.3.0 # httpx asgi-lifespan==2.1.0 # via -r requirements/_test.in -async-timeout==4.0.3 - # via - # -c requirements/_base.txt - # redis attrs==23.2.0 # via # -c requirements/_base.txt @@ -69,11 +69,6 @@ docker==7.1.0 # via # -r requirements/_test.in # moto -exceptiongroup==1.2.1 - # via - # -c requirements/_base.txt - # anyio - # pytest faker==27.0.0 # via -r requirements/_test.in fakeredis==2.23.5 @@ -188,11 +183,15 @@ py-partiql-parser==0.5.5 # via moto pycparser==2.22 # via cffi -pydantic==1.10.15 +pydantic==2.9.2 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # aws-sam-translator +pydantic-core==2.23.4 + # via + # -c requirements/_base.txt + # pydantic pyparsing==3.1.2 # via moto pytest==8.3.2 @@ -224,7 +223,9 @@ python-dateutil==2.9.0.post0 # faker # moto python-dotenv==1.0.1 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in pyyaml==6.0.1 # via # -c requirements/../../../requirements/constraints.txt @@ -289,10 +290,6 @@ sympy==1.13.2 # via cfn-lint termcolor==2.4.0 # via pytest-sugar -tomli==2.0.1 - # via - # coverage - # pytest types-aiobotocore==2.13.0 # via # -c requirements/_base.txt @@ -320,11 +317,10 @@ types-pyyaml==6.0.12.20240808 typing-extensions==4.11.0 # via # -c requirements/_base.txt - # anyio # aws-sam-translator # cfn-lint - # fakeredis # pydantic + # pydantic-core # types-aiobotocore # types-aiobotocore-ec2 # types-aiobotocore-iam diff --git a/services/autoscaling/requirements/_tools.txt b/services/autoscaling/requirements/_tools.txt index db86636a373..7a2c3f9d91b 100644 --- a/services/autoscaling/requirements/_tools.txt +++ b/services/autoscaling/requirements/_tools.txt @@ -74,22 +74,12 @@ setuptools==73.0.1 # via # -c requirements/_test.txt # pip-tools -tomli==2.0.1 - # via - # -c requirements/_test.txt - # black - # build - # mypy - # pip-tools - # pylint tomlkit==0.13.2 # via pylint typing-extensions==4.11.0 # via # -c requirements/_base.txt # -c requirements/_test.txt - # astroid - # black # mypy virtualenv==20.26.3 # via pre-commit From 9cdb60b995ba20cefd905a9d24ce337559b185f6 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 1 Oct 2024 15:49:49 +0200 Subject: [PATCH 162/280] run bump-pydantic --- .../core/settings.py | 71 +++++++++++-------- 1 file changed, 42 insertions(+), 29 deletions(-) diff --git a/services/autoscaling/src/simcore_service_autoscaling/core/settings.py b/services/autoscaling/src/simcore_service_autoscaling/core/settings.py index 299293056d8..bae5f6dc85e 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/core/settings.py +++ b/services/autoscaling/src/simcore_service_autoscaling/core/settings.py @@ -1,6 +1,6 @@ import datetime from functools import cached_property -from typing import Any, ClassVar, Final, cast +from typing import Final, cast from aws_library.ec2 import EC2InstanceBootSpecific, EC2Tags from fastapi import FastAPI @@ -14,14 +14,16 @@ from models_library.clusters import InternalClusterAuthentication from models_library.docker import DockerLabelKey from pydantic import ( + AliasChoices, AnyUrl, Field, NonNegativeInt, PositiveInt, - parse_obj_as, - root_validator, - validator, + TypeAdapter, + field_validator, + model_validator, ) +from 
pytest_simcore.helpers.dict_tools import ConfigDict from settings_library.base import BaseCustomSettings from settings_library.docker_registry import RegistrySettings from settings_library.ec2 import EC2Settings @@ -41,10 +43,9 @@ class AutoscalingSSMSettings(SSMSettings): class AutoscalingEC2Settings(EC2Settings): - class Config(EC2Settings.Config): - env_prefix = AUTOSCALING_ENV_PREFIX - - schema_extra: ClassVar[dict[str, Any]] = { # type: ignore[misc] + model_config = ConfigDict( + env_prefix=AUTOSCALING_ENV_PREFIX, + json_schema_extra={ "examples": [ { f"{AUTOSCALING_ENV_PREFIX}EC2_ACCESS_KEY_ID": "my_access_key_id", @@ -53,7 +54,8 @@ class Config(EC2Settings.Config): f"{AUTOSCALING_ENV_PREFIX}EC2_SECRET_ACCESS_KEY": "my_secret_access_key", } ], - } + }, + ) class EC2InstancesSettings(BaseCustomSettings): @@ -93,7 +95,7 @@ class EC2InstancesSettings(BaseCustomSettings): EC2_INSTANCES_SECURITY_GROUP_IDS: list[str] = Field( ..., - min_items=1, + min_length=1, description="A security group acts as a virtual firewall for your EC2 instances to control incoming and outgoing traffic" " (https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-security-groups.html), " " this is required to start a new EC2 instance", @@ -130,7 +132,7 @@ class EC2InstancesSettings(BaseCustomSettings): description="ARN the EC2 instance should be attached to (example: arn:aws:iam::XXXXX:role/NAME), to disable pass an empty string", ) - @validator("EC2_INSTANCES_TIME_BEFORE_DRAINING") + @field_validator("EC2_INSTANCES_TIME_BEFORE_DRAINING") @classmethod def ensure_draining_delay_time_is_in_range( cls, value: datetime.timedelta @@ -141,7 +143,7 @@ def ensure_draining_delay_time_is_in_range( value = datetime.timedelta(minutes=1) return value - @validator("EC2_INSTANCES_TIME_BEFORE_TERMINATION") + @field_validator("EC2_INSTANCES_TIME_BEFORE_TERMINATION") @classmethod def ensure_termination_delay_time_is_in_range( cls, value: datetime.timedelta @@ -152,14 +154,14 @@ def ensure_termination_delay_time_is_in_range( value = datetime.timedelta(minutes=59) return value - @validator("EC2_INSTANCES_ALLOWED_TYPES") + @field_validator("EC2_INSTANCES_ALLOWED_TYPES") @classmethod def check_valid_instance_names( cls, value: dict[str, EC2InstanceBootSpecific] ) -> dict[str, EC2InstanceBootSpecific]: # NOTE: needed because of a flaw in BaseCustomSettings # issubclass raises TypeError if used on Aliases - parse_obj_as(list[InstanceTypeType], list(value)) + TypeAdapter(list[InstanceTypeType]).validate_python(list(value)) return value @@ -217,36 +219,39 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): # RUNTIME ----------------------------------------------------------- AUTOSCALING_DEBUG: bool = Field( - default=False, description="Debug mode", env=["AUTOSCALING_DEBUG", "DEBUG"] + default=False, + description="Debug mode", + validation_alias=AliasChoices("AUTOSCALING_DEBUG", "DEBUG"), ) - AUTOSCALING_REMOTE_DEBUG_PORT: PortInt = PortInt(3000) + AUTOSCALING_REMOTE_DEBUG_PORT: PortInt = 3000 AUTOSCALING_LOGLEVEL: LogLevel = Field( - LogLevel.INFO, env=["AUTOSCALING_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"] + LogLevel.INFO, + validation_alias=AliasChoices("AUTOSCALING_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"), ) AUTOSCALING_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field( default=False, - env=[ + validation_alias=AliasChoices( "AUTOSCALING_LOG_FORMAT_LOCAL_DEV_ENABLED", "LOG_FORMAT_LOCAL_DEV_ENABLED", - ], + ), description="Enables local development log format. 
WARNING: make sure it is disabled if you want to have structured logs!", ) AUTOSCALING_EC2_ACCESS: AutoscalingEC2Settings | None = Field( - auto_default_from_env=True + json_schema_extra={"auto_default_from_env": True} ) AUTOSCALING_SSM_ACCESS: AutoscalingSSMSettings | None = Field( - auto_default_from_env=True + json_schema_extra={"auto_default_from_env": True} ) AUTOSCALING_EC2_INSTANCES: EC2InstancesSettings | None = Field( - auto_default_from_env=True + json_schema_extra={"auto_default_from_env": True} ) AUTOSCALING_NODES_MONITORING: NodesMonitoringSettings | None = Field( - auto_default_from_env=True + json_schema_extra={"auto_default_from_env": True} ) AUTOSCALING_POLL_INTERVAL: datetime.timedelta = Field( @@ -255,13 +260,21 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): "(default to seconds, or see https://pydantic-docs.helpmanual.io/usage/types/#datetime-types for string formating)", ) - AUTOSCALING_RABBITMQ: RabbitSettings | None = Field(auto_default_from_env=True) + AUTOSCALING_RABBITMQ: RabbitSettings | None = Field( + json_schema_extra={"auto_default_from_env": True} + ) - AUTOSCALING_REDIS: RedisSettings = Field(auto_default_from_env=True) + AUTOSCALING_REDIS: RedisSettings = Field( + json_schema_extra={"auto_default_from_env": True} + ) - AUTOSCALING_REGISTRY: RegistrySettings | None = Field(auto_default_from_env=True) + AUTOSCALING_REGISTRY: RegistrySettings | None = Field( + json_schema_extra={"auto_default_from_env": True} + ) - AUTOSCALING_DASK: DaskMonitoringSettings | None = Field(auto_default_from_env=True) + AUTOSCALING_DASK: DaskMonitoringSettings | None = Field( + json_schema_extra={"auto_default_from_env": True} + ) AUTOSCALING_PROMETHEUS_INSTRUMENTATION_ENABLED: bool = True @@ -276,12 +289,12 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): def LOG_LEVEL(self): # noqa: N802 return self.AUTOSCALING_LOGLEVEL - @validator("AUTOSCALING_LOGLEVEL") + @field_validator("AUTOSCALING_LOGLEVEL") @classmethod def valid_log_level(cls, value: str) -> str: return cls.validate_log_level(value) - @root_validator() + @model_validator(mode="after") @classmethod def exclude_both_dynamic_computational_mode(cls, values): if ( From cddd4399b0a241d6d6e80f71f9df901706eb357d Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 1 Oct 2024 16:15:02 +0200 Subject: [PATCH 163/280] replace parse_obj --- .../services/webserver.py | 4 +-- .../tests/unit/_with_db/test_api_user.py | 2 +- .../tests/unit/api_solvers/conftest.py | 4 +-- .../test_api_routers_solvers_jobs.py | 6 ++-- .../test_api_routers_solvers_jobs_logs.py | 14 ++++---- .../test_api_routers_solvers_jobs_metadata.py | 12 +++---- .../test_api_routes_studies_jobs.py | 8 ++--- services/api-server/tests/unit/conftest.py | 4 +-- .../api-server/tests/unit/test_api_files.py | 4 ++- .../tests/unit/test_api_solver_jobs.py | 12 +++---- .../api-server/tests/unit/test_credits.py | 2 +- services/api-server/tests/unit/test_models.py | 2 +- .../tests/unit/test_models_schemas_jobs.py | 4 +-- .../tests/unit/test_services_rabbitmq.py | 12 ++++--- ...t_services_solver_job_models_converters.py | 24 ++++++------- .../auto_scaling_mode_computational.py | 4 +-- .../utils/utils_docker.py | 8 ++--- services/autoscaling/tests/unit/conftest.py | 2 +- .../autoscaling/tests/unit/test_api_health.py | 8 ++--- .../api/rest/_services_resources.py | 2 +- .../simcore_service_catalog/core/settings.py | 2 +- .../db/repositories/services.py | 2 +- .../services/director.py | 2 +- .../services/manifest.py | 2 +- 
.../catalog/tests/unit/with_dbs/conftest.py | 4 +-- .../with_dbs/test_api_rest_services__get.py | 2 +- .../test_api_rest_services_specifications.py | 32 ++++++++--------- .../unit/with_dbs/test_db_repositories.py | 4 +-- .../with_dbs/test_services_access_rights.py | 6 ++-- .../tests/unit/test_api_health.py | 6 ++-- .../dask-sidecar/tests/unit/test_tasks.py | 12 +++---- .../tests/unit/test_route_health.py | 4 +-- .../api/routes/dynamic_services.py | 2 +- .../models/dynamic_services_scheduler.py | 2 +- .../db/repositories/comp_tasks/_utils.py | 6 ++-- .../db/repositories/projects_networks.py | 2 +- .../repositories/user_preferences_frontend.py | 2 +- .../modules/director_v0.py | 6 ++-- .../dynamic_sidecar/api_client/_public.py | 4 ++- .../docker_service_specs/settings.py | 6 ++-- .../docker_service_specs/sidecar.py | 2 +- .../docker_service_specs/volume_remover.py | 2 +- .../scheduler/_core/_event_create_sidecars.py | 4 +-- .../scheduler/_core/_events_utils.py | 4 +-- .../scheduler/_core/_scheduler_utils.py | 2 +- .../modules/osparc_variables/_api_auth_rpc.py | 2 +- .../modules/projects_networks.py | 10 +++--- .../modules/storage.py | 2 +- .../simcore_service_director_v2/utils/dask.py | 4 +-- services/director-v2/tests/conftest.py | 4 +-- .../tests/helpers/shared_comp_utils.py | 2 +- .../integration/01/test_computation_api.py | 4 +-- .../tests/integration/02/conftest.py | 4 +-- ...t_dynamic_sidecar_nodeports_integration.py | 6 ++-- .../director-v2/tests/integration/conftest.py | 2 +- services/director-v2/tests/unit/conftest.py | 6 ++-- .../unit/test_models_dynamic_services.py | 2 +- .../tests/unit/test_modules_dask_client.py | 18 +++++----- .../unit/test_modules_dask_clients_pool.py | 2 +- .../unit/test_modules_project_networks.py | 6 ++-- .../tests/unit/test_utils_comp_scheduler.py | 2 +- .../tests/unit/with_dbs/conftest.py | 2 +- .../unit/with_dbs/test_api_route_clusters.py | 6 ++-- .../test_api_route_clusters_details.py | 6 ++-- .../with_dbs/test_api_route_computations.py | 6 ++-- .../test_api_route_dynamic_services.py | 8 ++--- .../tests/unit/with_dbs/test_cli.py | 2 +- ...t_modules_comp_scheduler_dask_scheduler.py | 10 +++--- ...test_modules_dynamic_sidecar_docker_api.py | 4 +-- ...es_dynamic_sidecar_docker_service_specs.py | 14 ++++---- .../services/director_v2/_public_client.py | 8 ++--- .../unit/api_rpc/test_api_rpc__services.py | 6 ++-- .../tests/unit/test__model_examples.py | 2 +- .../tests/unit/test_api_containers.py | 4 +-- .../tests/unit/test_core_stroage.py | 2 +- .../tests/unit/api/test_api_invitations.py | 6 ++-- .../tests/unit/api/test_api_meta.py | 2 +- services/invitations/tests/unit/conftest.py | 2 +- .../tests/unit/test__model_examples.py | 2 +- services/invitations/tests/unit/test_cli.py | 2 +- .../services/payments_gateway.py | 12 +++---- .../unit/api/test_rest_acknowledgements.py | 4 +-- .../payments/tests/unit/api/test_rest_meta.py | 2 +- .../tests/unit/test__model_examples.py | 2 +- .../test_services_auto_recharge_listener.py | 2 +- .../resource_tracker_pricing_plans.py | 4 +-- .../tests/unit/api_rest/test_api_meta.py | 2 +- .../handlers_health.py | 4 +-- .../src/simcore_service_storage/models.py | 2 +- .../src/simcore_service_storage/utils.py | 2 +- services/storage/tests/conftest.py | 10 +++--- services/storage/tests/unit/test_cli.py | 4 +-- .../storage/tests/unit/test_handlers_files.py | 8 ++--- .../tests/unit/test_handlers_health.py | 14 ++++---- .../tests/unit/test_handlers_simcore_s3.py | 2 +- .../api_keys/_api.py | 2 +- .../api_keys/_handlers.py | 6 ++-- 
.../simcore_service_webserver/catalog/_api.py | 2 +- .../catalog/_handlers.py | 6 ++-- .../catalog/client.py | 2 +- .../clusters/_handlers.py | 14 ++++---- .../diagnostics/_handlers.py | 2 +- .../director_v2/_core_computations.py | 2 +- .../director_v2/_handlers.py | 4 +-- .../folders/_folders_handlers.py | 12 +++---- .../groups/_handlers.py | 20 +++++------ .../login/_registration.py | 2 +- .../login/_registration_handlers.py | 2 +- .../long_running_tasks.py | 2 +- .../meta_modeling/_handlers.py | 4 +-- .../meta_modeling/_iterations.py | 6 ++-- .../payments/_methods_api.py | 2 +- .../payments/_onetime_api.py | 2 +- .../products/_handlers.py | 6 ++-- .../products/_invitations_handlers.py | 2 +- .../projects/_comments_handlers.py | 12 +++---- .../projects/_crud_api_create.py | 2 +- .../projects/_crud_api_read.py | 2 +- .../projects/_crud_handlers.py | 24 ++++++------- .../projects/_db_utils.py | 2 +- .../projects/_folders_handlers.py | 2 +- .../projects/_groups_handlers.py | 8 ++--- .../projects/_metadata_handlers.py | 4 +-- .../projects/_nodes_handlers.py | 28 +++++++-------- .../projects/_ports_api.py | 2 +- .../projects/_ports_handlers.py | 8 ++--- .../_projects_nodes_pricing_unit_handlers.py | 4 +-- .../projects/_states_handlers.py | 6 ++-- .../projects/_wallets_handlers.py | 4 +-- .../projects/_workspaces_handlers.py | 2 +- .../projects/projects_api.py | 4 +-- .../_pricing_plans_admin_handlers.py | 18 +++++----- .../resource_usage/_pricing_plans_handlers.py | 2 +- .../resource_usage/_service_runs_handlers.py | 10 +++--- .../scicrunch/_resolver.py | 4 +-- .../scicrunch/_rest.py | 2 +- .../storage/_handlers.py | 4 +-- .../simcore_service_webserver/storage/api.py | 12 +++---- .../studies_dispatcher/_projects.py | 2 +- .../tags/_handlers.py | 8 ++--- .../users/_handlers.py | 8 ++--- .../users/_notifications_handlers.py | 8 ++--- .../users/_preferences_api.py | 2 +- .../users/_preferences_db.py | 2 +- .../users/_preferences_handlers.py | 2 +- .../users/_tokens_handlers.py | 8 ++--- .../version_control/_core.py | 2 +- .../version_control/_handlers.py | 20 +++++------ .../wallets/_groups_handlers.py | 8 ++--- .../wallets/_handlers.py | 10 +++--- .../wallets/_payments_handlers.py | 30 ++++++++-------- .../workspaces/_groups_handlers.py | 8 ++--- .../workspaces/_workspaces_handlers.py | 12 +++---- services/web/server/tests/conftest.py | 2 +- .../02/scicrunch/test_scicrunch__rest.py | 2 +- .../unit/isolated/test_catalog_api_units.py | 24 ++++++------- .../unit/isolated/test_dynamic_scheduler.py | 12 +++---- .../isolated/test_garbage_collector_core.py | 2 +- .../tests/unit/isolated/test_groups_models.py | 4 +-- .../unit/isolated/test_products_model.py | 4 +-- .../unit/isolated/test_projects_utils.py | 4 +-- .../unit/isolated/test_storage_schemas.py | 2 +- .../isolated/test_studies_dispatcher_core.py | 10 +++--- .../test_studies_dispatcher_models.py | 2 +- .../unit/isolated/test_user_notifications.py | 12 +++---- .../tests/unit/isolated/test_users_models.py | 2 +- .../01/clusters/test_clusters_handlers.py | 8 ++--- .../test_studies_dispatcher_handlers.py | 2 +- .../test_studies_dispatcher_projects.py | 2 +- .../unit/with_dbs/02/test_announcements.py | 6 ++-- .../02/test_projects_cancellations.py | 2 +- .../02/test_projects_crud_handlers__clone.py | 2 +- ...handlers__clone_in_workspace_and_folder.py | 2 +- .../02/test_projects_nodes_handler.py | 2 +- .../02/test_projects_ports_handlers.py | 2 +- .../02/test_projects_states_handlers.py | 4 +-- .../unit/with_dbs/03/folders/test_folders.py | 8 ++--- 
.../unit/with_dbs/03/invitations/conftest.py | 4 +-- ...login_handlers_registration_invitations.py | 6 ++-- .../test_products__invitations_handlers.py | 6 ++-- .../03/login/test_login_registration.py | 2 +- .../test_meta_modeling_iterations.py | 10 +++--- .../tests/unit/with_dbs/03/test_email.py | 4 +-- .../tests/unit/with_dbs/03/test_users.py | 4 +-- .../with_dbs/03/test_users__notifications.py | 2 +- .../with_dbs/03/version_control/conftest.py | 2 +- .../version_control/test_version_control.py | 2 +- .../test_version_control_handlers.py | 34 +++++++++---------- .../with_dbs/03/wallets/payments/conftest.py | 2 +- .../03/wallets/payments/test_payments.py | 6 ++-- .../wallets/payments/test_payments_methods.py | 18 +++++----- .../with_dbs/03/workspaces/test_workspaces.py | 4 +-- 192 files changed, 569 insertions(+), 559 deletions(-) diff --git a/services/api-server/src/simcore_service_api_server/services/webserver.py b/services/api-server/src/simcore_service_api_server/services/webserver.py index 0d265248dc2..a6e71677950 100644 --- a/services/api-server/src/simcore_service_api_server/services/webserver.py +++ b/services/api-server/src/simcore_service_api_server/services/webserver.py @@ -282,7 +282,7 @@ async def create_project( ) response.raise_for_status() result = await self._wait_for_long_running_task_results(response) - return ProjectGet.parse_obj(result) + return ProjectGet.model_validate(result) @_exception_mapper(_JOB_STATUS_MAP) async def clone_project( @@ -307,7 +307,7 @@ async def clone_project( ) response.raise_for_status() result = await self._wait_for_long_running_task_results(response) - return ProjectGet.parse_obj(result) + return ProjectGet.model_validate(result) @_exception_mapper(_JOB_STATUS_MAP) async def get_project(self, *, project_id: UUID) -> ProjectGet: diff --git a/services/api-server/tests/unit/_with_db/test_api_user.py b/services/api-server/tests/unit/_with_db/test_api_user.py index 87d2de26c64..b20c1727be5 100644 --- a/services/api-server/tests/unit/_with_db/test_api_user.py +++ b/services/api-server/tests/unit/_with_db/test_api_user.py @@ -86,6 +86,6 @@ async def test_update_profile( ) assert resp.status_code == status.HTTP_200_OK, resp.text - profile = Profile.parse_obj(resp.json()) + profile = Profile.model_validate(resp.json()) assert profile.first_name == "Oliver" assert profile.last_name == "Heaviside" diff --git a/services/api-server/tests/unit/api_solvers/conftest.py b/services/api-server/tests/unit/api_solvers/conftest.py index a7b813776da..ec8bf7d5630 100644 --- a/services/api-server/tests/unit/api_solvers/conftest.py +++ b/services/api-server/tests/unit/api_solvers/conftest.py @@ -96,8 +96,8 @@ async def mocked_directorv2_service( stop_time: Final[datetime] = datetime.now() + timedelta(seconds=5) def _get_computation(request: httpx.Request, **kwargs) -> httpx.Response: - task = ComputationTaskGet.parse_obj( - ComputationTaskGet.Config.schema_extra["examples"][0] + task = ComputationTaskGet.model_validate( + ComputationTaskGet.model_config["json_schema_extra"]["examples"][0] ) if datetime.now() > stop_time: task.state = RunningState.SUCCESS diff --git a/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs.py b/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs.py index ed3ae76cfbd..d26c29b0ce5 100644 --- a/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs.py +++ b/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs.py @@ -247,7 +247,7 @@ async def test_run_solver_job( 
).respond( status.HTTP_201_CREATED, json=jsonable_encoder( - ComputationTaskGet.parse_obj( + ComputationTaskGet.model_validate( { "id": project_id, "state": "UNKNOWN", @@ -358,7 +358,7 @@ async def test_run_solver_job( assert mocked_webserver_service_api["get_task_status"].called assert mocked_webserver_service_api["get_task_result"].called - job = Job.parse_obj(resp.json()) + job = Job.model_validate(resp.json()) # Start Job resp = await client.post( @@ -369,5 +369,5 @@ async def test_run_solver_job( assert resp.status_code == status.HTTP_202_ACCEPTED assert mocked_directorv2_service_api["inspect_computation"].called - job_status = JobStatus.parse_obj(resp.json()) + job_status = JobStatus.model_validate(resp.json()) assert job_status.progress == 0.0 diff --git a/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs_logs.py b/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs_logs.py index a30404606d7..eb821e46d01 100644 --- a/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs_logs.py +++ b/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs_logs.py @@ -76,7 +76,7 @@ def fake_project_for_streaming( assert isinstance(response_body := GET_PROJECT.response_body, dict) assert (data := response_body.get("data")) is not None - fake_project = ProjectGet.parse_obj(data) + fake_project = ProjectGet.model_validate(data) fake_project.workbench = {faker.uuid4(): faker.uuid4()} mocker.patch( "simcore_service_api_server.api.dependencies.webserver.AuthSession.get_project", @@ -113,8 +113,8 @@ async def test_log_streaming( response.raise_for_status() if not disconnect: async for line in response.aiter_lines(): - job_log = JobLog.parse_raw(line) - pprint(job_log.json()) + job_log = JobLog.model_validate_json(line) + pprint(job_log.model_dump()) collected_messages += job_log.messages assert fake_log_distributor.deregister_is_called @@ -160,12 +160,12 @@ async def test_logstreaming_job_not_found_exception( response.raise_for_status() async for line in response.aiter_lines(): try: - job_log = JobLog.parse_raw(line) - pprint(job_log.json()) + job_log = JobLog.model_validate_json(line) + pprint(job_log.model_dump()) except ValidationError: - error = ErrorGet.parse_raw(line) + error = ErrorGet.model_validate_json(line) _received_error = True - print(error.json()) + print(error.model_dump()) assert fake_log_distributor.deregister_is_called assert _received_error diff --git a/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs_metadata.py b/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs_metadata.py index ccf9b40b565..8afb38ca86e 100644 --- a/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs_metadata.py +++ b/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs_metadata.py @@ -112,10 +112,10 @@ async def test_get_and_update_job_metadata( "title": "Temperature", "enabled": True, } - ).dict(), + ).model_dump(), ) assert resp.status_code == status.HTTP_201_CREATED - job = Job.parse_obj(resp.json()) + job = Job.model_validate(resp.json()) # Get metadata resp = await client.get( @@ -123,7 +123,7 @@ async def test_get_and_update_job_metadata( auth=auth, ) assert resp.status_code == status.HTTP_200_OK - job_meta = JobMetadata.parse_obj(resp.json()) + job_meta = JobMetadata.model_validate(resp.json()) assert job_meta.metadata == {} @@ -132,11 +132,11 @@ async def test_get_and_update_job_metadata( resp = await client.patch( 
f"/{API_VTAG}/solvers/{solver_key}/releases/{solver_version}/jobs/{job.id}/metadata", auth=auth, - json=JobMetadataUpdate(metadata=my_metadata).dict(), + json=JobMetadataUpdate(metadata=my_metadata).model_dump(), ) assert resp.status_code == status.HTTP_200_OK - job_meta = JobMetadata.parse_obj(resp.json()) + job_meta = JobMetadata.model_validate(resp.json()) assert job_meta.metadata == my_metadata # Get metadata after update @@ -145,7 +145,7 @@ async def test_get_and_update_job_metadata( auth=auth, ) assert resp.status_code == status.HTTP_200_OK - job_meta = JobMetadata.parse_obj(resp.json()) + job_meta = JobMetadata.model_validate(resp.json()) assert job_meta.metadata == my_metadata diff --git a/services/api-server/tests/unit/api_studies/test_api_routes_studies_jobs.py b/services/api-server/tests/unit/api_studies/test_api_routes_studies_jobs.py index 61d91fa9d94..ece3fcd1f57 100644 --- a/services/api-server/tests/unit/api_studies/test_api_routes_studies_jobs.py +++ b/services/api-server/tests/unit/api_studies/test_api_routes_studies_jobs.py @@ -358,7 +358,7 @@ async def test_get_job_logs( f"{API_VTAG}/studies/{_study_id}/jobs/{_job_id}/outputs/log-links", auth=auth ) assert response.status_code == status.HTTP_200_OK - _ = JobLogsMap.parse_obj(response.json()) + _ = JobLogsMap.model_validate(response.json()) async def test_get_study_outputs( @@ -394,17 +394,17 @@ async def test_get_study_outputs( }, ) assert response.status_code == status.HTTP_200_OK - _job = Job.parse_obj(response.json()) + _job = Job.model_validate(response.json()) _job_id = _job.id response = await client.post( f"/{API_VTAG}/studies/{_study_id}/jobs/{_job_id}:start", auth=auth ) assert response.status_code == status.HTTP_202_ACCEPTED - _ = JobStatus.parse_obj(response.json()) + _ = JobStatus.model_validate(response.json()) response = await client.post( f"/{API_VTAG}/studies/{_study_id}/jobs/{_job_id}/outputs", auth=auth ) assert response.status_code == status.HTTP_200_OK - _ = JobOutputs.parse_obj(response.json()) + _ = JobOutputs.model_validate(response.json()) diff --git a/services/api-server/tests/unit/conftest.py b/services/api-server/tests/unit/conftest.py index e8324bcc0b7..99d248783c2 100644 --- a/services/api-server/tests/unit/conftest.py +++ b/services/api-server/tests/unit/conftest.py @@ -511,7 +511,7 @@ def create_project_task(self, request: httpx.Request): if from_study := query.get("from_study"): return self.clone_project_task(request=request, project_id=from_study) project_create = json.loads(request.content) - project_get = ProjectGet.parse_obj( + project_get = ProjectGet.model_validate( { "creationDate": "2018-07-01T11:13:43Z", "lastChangeDate": "2018-07-01T11:13:43Z", @@ -525,7 +525,7 @@ def create_project_task(self, request: httpx.Request): def clone_project_task(self, request: httpx.Request, *, project_id: str): assert GET_PROJECT.response_body - project_get = ProjectGet.parse_obj( + project_get = ProjectGet.model_validate( { "creationDate": "2018-07-01T11:13:43Z", "lastChangeDate": "2018-07-01T11:13:43Z", diff --git a/services/api-server/tests/unit/test_api_files.py b/services/api-server/tests/unit/test_api_files.py index 4d45e0e5528..8c308bfa2f2 100644 --- a/services/api-server/tests/unit/test_api_files.py +++ b/services/api-server/tests/unit/test_api_files.py @@ -251,7 +251,9 @@ async def test_get_upload_links( payload: dict[str, str] = response.json() assert response.status_code == status.HTTP_200_OK - client_upload_schema: ClientFileUploadData = ClientFileUploadData.parse_obj(payload) + 
client_upload_schema: ClientFileUploadData = ClientFileUploadData.model_validate( + payload + ) if follow_up_request == "complete": body = { diff --git a/services/api-server/tests/unit/test_api_solver_jobs.py b/services/api-server/tests/unit/test_api_solver_jobs.py index 524adc7300c..0872b25b873 100644 --- a/services/api-server/tests/unit/test_api_solver_jobs.py +++ b/services/api-server/tests/unit/test_api_solver_jobs.py @@ -17,7 +17,7 @@ from models_library.api_schemas_webserver.resource_usage import PricingUnitGet from models_library.api_schemas_webserver.wallets import WalletGetWithAvailableCredits from models_library.generics import Envelope -from pydantic import parse_obj_as +from pydantic import TypeAdapter from pytest_simcore.helpers.httpx_calls_capture_models import ( CreateRespxMockCallback, HttpApiCallCaptureModel, @@ -182,7 +182,7 @@ def _get_pricing_unit_side_effect( ) if capture_file == "get_job_pricing_unit_success.json": assert response.status_code == status.HTTP_200_OK - _ = parse_obj_as(PricingUnitGet, response.json()) + _ = TypeAdapter(PricingUnitGet).validate_python(response.json()) elif capture_file == "get_job_pricing_unit_invalid_job.json": assert response.status_code == status.HTTP_404_NOT_FOUND elif capture_file == "get_job_pricing_unit_invalid_solver.json": @@ -342,7 +342,7 @@ async def test_start_solver_job_conflict( ) assert response.status_code == status.HTTP_200_OK - job_status = JobStatus.parse_obj(response.json()) + job_status = JobStatus.model_validate(response.json()) assert f"{job_status.job_id}" == _job_id @@ -364,7 +364,7 @@ def _stop_job_side_effect( path_params: dict[str, Any], capture: HttpApiCallCaptureModel, ) -> Any: - task = ComputationTaskGet.parse_obj(capture.response_body) + task = ComputationTaskGet.model_validate(capture.response_body) task.id = UUID(_job_id) return jsonable_encoder(task) @@ -384,7 +384,7 @@ def _stop_job_side_effect( ) assert response.status_code == status.HTTP_200_OK - status_ = JobStatus.parse_obj(response.json()) + status_ = JobStatus.model_validate(response.json()) assert status_.job_id == UUID(_job_id) @@ -416,7 +416,7 @@ def _wallet_side_effect( path_params: dict[str, Any], capture: HttpApiCallCaptureModel, ): - wallet = parse_obj_as( + wallet = TypeAdapter( Envelope[WalletGetWithAvailableCredits], capture.response_body ).data assert wallet is not None diff --git a/services/api-server/tests/unit/test_credits.py b/services/api-server/tests/unit/test_credits.py index 8c2dfd7dd74..3630e218754 100644 --- a/services/api-server/tests/unit/test_credits.py +++ b/services/api-server/tests/unit/test_credits.py @@ -23,4 +23,4 @@ async def test_get_credits_price( response = await client.get(f"{API_VTAG}/credits/price", auth=auth) assert response.status_code == status.HTTP_200_OK - _ = GetCreditPrice.parse_obj(response.json()) + _ = GetCreditPrice.model_validate(response.json()) diff --git a/services/api-server/tests/unit/test_models.py b/services/api-server/tests/unit/test_models.py index 06ee47d86c4..b3e1f48a57a 100644 --- a/services/api-server/tests/unit/test_models.py +++ b/services/api-server/tests/unit/test_models.py @@ -21,7 +21,7 @@ def test_api_server_model_examples( model_cls: type[BaseModel], example_name: int, example_data: Any ): - assert model_cls.parse_obj( + assert model_cls.model_validate( example_data ), f"Failed {example_name} : {json.dumps(example_data)}" diff --git a/services/api-server/tests/unit/test_models_schemas_jobs.py b/services/api-server/tests/unit/test_models_schemas_jobs.py index 
afb1d6e1966..b8c646feaf2 100644 --- a/services/api-server/tests/unit/test_models_schemas_jobs.py +++ b/services/api-server/tests/unit/test_models_schemas_jobs.py @@ -37,8 +37,8 @@ def _deepcopy_and_shuffle(src): return deepcopy(src) shuffled_raw = _deepcopy_and_shuffle(raw) - inputs1 = JobInputs.parse_obj(raw) - inputs2 = JobInputs.parse_obj(shuffled_raw) + inputs1 = JobInputs.model_validate(raw) + inputs2 = JobInputs.model_validate(shuffled_raw) print(inputs1) print(inputs2) diff --git a/services/api-server/tests/unit/test_services_rabbitmq.py b/services/api-server/tests/unit/test_services_rabbitmq.py index ee68615c8f1..6f2f2ca546f 100644 --- a/services/api-server/tests/unit/test_services_rabbitmq.py +++ b/services/api-server/tests/unit/test_services_rabbitmq.py @@ -332,8 +332,8 @@ async def log_streamer_with_distributor( log_distributor: LogDistributor, ) -> AsyncIterable[LogStreamer]: def _get_computation(request: httpx.Request, **kwargs) -> httpx.Response: - task = ComputationTaskGet.parse_obj( - ComputationTaskGet.Config.schema_extra["examples"][0] + task = ComputationTaskGet.model_validate( + ComputationTaskGet.model_config["json_schema_extra"]["examples"][0] ) if computation_done(): task.state = RunningState.SUCCESS @@ -420,13 +420,13 @@ def routing_key(self) -> str: log_level=logging.INFO, ) with pytest.raises(ValidationError): - LoggerRabbitMessage.parse_obj(log_rabbit_message.dict()) + LoggerRabbitMessage.model_validate(log_rabbit_message.model_dump()) await produce_logs("expected", log_message=log_rabbit_message) ii: int = 0 async for log in log_streamer_with_distributor.log_generator(): - _ = JobLog.parse_raw(log) + _ = JobLog.model_validate_json(log) ii += 1 assert ii == 0 @@ -448,7 +448,9 @@ async def test_log_generator(mocker: MockFixture, faker: Faker): published_logs: list[str] = [] for _ in range(10): - job_log = JobLog.parse_obj(JobLog.Config.schema_extra["example"]) + job_log = JobLog.model_validate( + JobLog.model_config["json_schema_extra"]["example"] + ) msg = faker.text() published_logs.append(msg) job_log.messages = [msg] diff --git a/services/api-server/tests/unit/test_services_solver_job_models_converters.py b/services/api-server/tests/unit/test_services_solver_job_models_converters.py index 28f8be422f9..3141a3a9aee 100644 --- a/services/api-server/tests/unit/test_services_solver_job_models_converters.py +++ b/services/api-server/tests/unit/test_services_solver_job_models_converters.py @@ -6,7 +6,7 @@ from faker import Faker from models_library.projects import Project from models_library.projects_nodes import InputsDict, InputTypes, SimCoreFileLink -from pydantic import create_model, parse_obj_as +from pydantic import TypeAdapter, create_model from simcore_service_api_server.models.schemas.files import File from simcore_service_api_server.models.schemas.jobs import ArgumentTypes, Job, JobInputs from simcore_service_api_server.models.schemas.solvers import Solver @@ -20,7 +20,7 @@ def test_create_project_model_for_job(faker: Faker): - solver = Solver.parse_obj( + solver = Solver.model_validate( { "id": "simcore/services/comp/itis/sleeper", "version": "2.0.2", @@ -31,7 +31,7 @@ def test_create_project_model_for_job(faker: Faker): } ) - inputs = JobInputs.parse_obj( + inputs = JobInputs.model_validate( { "values": { "input_3": False, # Fail after sleep ? 
@@ -46,7 +46,7 @@ def test_create_project_model_for_job(faker: Faker): } ) - print(inputs.json(indent=2)) + print(inputs.model_dump_json(indent=2)) job = Job.create_solver_job(solver=solver, inputs=inputs) @@ -77,7 +77,7 @@ def test_job_to_node_inputs_conversion(): } ) for value in job_inputs.values.values(): - assert parse_obj_as(ArgumentTypes, value) == value + assert TypeAdapter(ArgumentTypes).validate_python(value) == value node_inputs: InputsDict = { "x": 4.33, @@ -94,14 +94,14 @@ def test_job_to_node_inputs_conversion(): } for value in node_inputs.values(): - assert parse_obj_as(InputTypes, value) == value + assert TypeAdapter(InputTypes).validate_python(value) == value # test transformations in both directions got_node_inputs = create_node_inputs_from_job_inputs(inputs=job_inputs) got_job_inputs = create_job_inputs_from_node_inputs(inputs=node_inputs) NodeInputs = create_model("NodeInputs", __root__=(dict[str, InputTypes], ...)) - print(NodeInputs.parse_obj(got_node_inputs).json(indent=2)) + print(NodeInputs.model_validate(got_node_inputs).model_dump_json(indent=2)) print(got_job_inputs.json(indent=2)) assert got_job_inputs == job_inputs @@ -109,7 +109,7 @@ def test_job_to_node_inputs_conversion(): def test_create_job_from_project(faker: Faker): - project = Project.parse_obj( + project = Project.model_validate( { "uuid": "f925e30f-19de-42dc-acab-3ce93ea0a0a7", "name": "simcore%2Fservices%2Fcomp%2Fitis%2Fsleeper/2.0.2/jobs/f925e30f-19de-42dc-acab-3ce93ea0a0a7", @@ -181,7 +181,7 @@ def test_create_job_from_project(faker: Faker): }, ) - expected_job = Job.parse_obj( + expected_job = Job.model_validate( { "id": "f925e30f-19de-42dc-acab-3ce93ea0a0a7", "name": "simcore%2Fservices%2Fcomp%2Fitis%2Fsleeper/2.0.2/jobs/f925e30f-19de-42dc-acab-3ce93ea0a0a7", @@ -207,12 +207,12 @@ def fake_url_for(*args, **kwargs): assert job.id == project.uuid assert job.name == project.name - url_field_names = {name for name in job.__fields__ if name.endswith("url")} + url_field_names = {name for name in job.model_fields if name.endswith("url")} assert all(getattr(job, _) for _ in url_field_names) # this tends to be a problem assert job.inputs_checksum == expected_job.inputs_checksum - assert job.dict(exclude=url_field_names) == expected_job.dict( + assert job.model_dump(exclude=url_field_names) == expected_job.model_dump( exclude=url_field_names ) @@ -222,7 +222,7 @@ def test_create_jobstatus_from_task(): from simcore_service_api_server.models.schemas.jobs import JobStatus from simcore_service_api_server.services.director_v2 import ComputationTaskGet - task = ComputationTaskGet.parse_obj({}) # TODO: + task = ComputationTaskGet.model_validate({}) # TODO: job_status: JobStatus = create_jobstatus_from_task(task) assert job_status.job_id == task.id diff --git a/services/autoscaling/src/simcore_service_autoscaling/modules/auto_scaling_mode_computational.py b/services/autoscaling/src/simcore_service_autoscaling/modules/auto_scaling_mode_computational.py index 0e3862e51b1..8589629cc63 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/modules/auto_scaling_mode_computational.py +++ b/services/autoscaling/src/simcore_service_autoscaling/modules/auto_scaling_mode_computational.py @@ -151,10 +151,10 @@ async def compute_cluster_used_resources( for i in instances ) ) - counter = collections.Counter({k: 0 for k in Resources.__fields__}) + counter = collections.Counter({k: 0 for k in Resources.model_fields}) for result in list_of_used_resources: counter.update(result.dict()) - return 
Resources.parse_obj(dict(counter)) + return Resources.model_validate(dict(counter)) @staticmethod async def compute_cluster_total_resources( diff --git a/services/autoscaling/src/simcore_service_autoscaling/utils/utils_docker.py b/services/autoscaling/src/simcore_service_autoscaling/utils/utils_docker.py index fe782f66b2a..ad18a3cffb6 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/utils/utils_docker.py +++ b/services/autoscaling/src/simcore_service_autoscaling/utils/utils_docker.py @@ -264,7 +264,7 @@ async def compute_cluster_total_resources(nodes: list[Node]) -> Resources: } ) - return Resources.parse_obj(dict(cluster_resources_counter)) + return Resources.model_validate(dict(cluster_resources_counter)) def get_max_resources_from_docker_task(task: Task) -> Resources: @@ -370,7 +370,7 @@ async def compute_node_used_resources( "cpus": task_reservations.get("NanoCPUs", 0) / _NANO_CPU, } ) - return Resources.parse_obj(dict(cluster_resources_counter)) + return Resources.model_validate(dict(cluster_resources_counter)) async def compute_cluster_used_resources( @@ -380,11 +380,11 @@ async def compute_cluster_used_resources( list_of_used_resources = await logged_gather( *(compute_node_used_resources(docker_client, node) for node in nodes) ) - counter = collections.Counter({k: 0 for k in Resources.__fields__}) + counter = collections.Counter({k: 0 for k in Resources.model_fields}) for result in list_of_used_resources: counter.update(result.dict()) - return Resources.parse_obj(dict(counter)) + return Resources.model_validate(dict(counter)) _COMMAND_TIMEOUT_S = 10 diff --git a/services/autoscaling/tests/unit/conftest.py b/services/autoscaling/tests/unit/conftest.py index b8ad45dca78..511434df88b 100644 --- a/services/autoscaling/tests/unit/conftest.py +++ b/services/autoscaling/tests/unit/conftest.py @@ -732,7 +732,7 @@ def host_memory_total() -> ByteSize: def osparc_docker_label_keys( faker: Faker, ) -> StandardSimcoreDockerLabels: - return StandardSimcoreDockerLabels.parse_obj( + return StandardSimcoreDockerLabels.model_validate( { "user_id": faker.pyint(), "project_id": faker.uuid4(), diff --git a/services/autoscaling/tests/unit/test_api_health.py b/services/autoscaling/tests/unit/test_api_health.py index 353aabf31a4..e3c22afddac 100644 --- a/services/autoscaling/tests/unit/test_api_health.py +++ b/services/autoscaling/tests/unit/test_api_health.py @@ -42,7 +42,7 @@ async def test_status_no_rabbit( response = await async_client.get("/status") response.raise_for_status() assert response.status_code == status.HTTP_200_OK - status_response = _StatusGet.parse_obj(response.json()) + status_response = _StatusGet.model_validate(response.json()) assert status_response assert status_response.rabbitmq.is_enabled is False @@ -66,7 +66,7 @@ async def test_status_no_ssm( response = await async_client.get("/status") response.raise_for_status() assert response.status_code == status.HTTP_200_OK - status_response = _StatusGet.parse_obj(response.json()) + status_response = _StatusGet.model_validate(response.json()) assert status_response assert status_response.rabbitmq.is_enabled is False @@ -94,7 +94,7 @@ async def test_status( response = await async_client.get("/status") response.raise_for_status() assert response.status_code == status.HTTP_200_OK - status_response = _StatusGet.parse_obj(response.json()) + status_response = _StatusGet.model_validate(response.json()) assert status_response assert status_response.rabbitmq.is_enabled is True @@ -114,7 +114,7 @@ async def test_status( response = 
await async_client.get("/status") response.raise_for_status() assert response.status_code == status.HTTP_200_OK - status_response = _StatusGet.parse_obj(response.json()) + status_response = _StatusGet.model_validate(response.json()) assert status_response assert status_response.rabbitmq.is_enabled is True diff --git a/services/catalog/src/simcore_service_catalog/api/rest/_services_resources.py b/services/catalog/src/simcore_service_catalog/api/rest/_services_resources.py index 701c4b41f3d..1d3ef4515a3 100644 --- a/services/catalog/src/simcore_service_catalog/api/rest/_services_resources.py +++ b/services/catalog/src/simcore_service_catalog/api/rest/_services_resources.py @@ -277,7 +277,7 @@ async def get_service_resources( spec_service_resources, user_specific_service_specs.service ) - service_to_resources[spec_key] = ImageResources.parse_obj( + service_to_resources[spec_key] = ImageResources.model_validate( { "image": image, "resources": spec_service_resources, diff --git a/services/catalog/src/simcore_service_catalog/core/settings.py b/services/catalog/src/simcore_service_catalog/core/settings.py index 01781d7ded6..a3d8fbf2f01 100644 --- a/services/catalog/src/simcore_service_catalog/core/settings.py +++ b/services/catalog/src/simcore_service_catalog/core/settings.py @@ -43,7 +43,7 @@ def base_url(self) -> str: _DEFAULT_SERVICE_SPECIFICATIONS: Final[ ServiceSpecifications -] = ServiceSpecifications.parse_obj({}) +] = ServiceSpecifications.model_validate({}) class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): diff --git a/services/catalog/src/simcore_service_catalog/db/repositories/services.py b/services/catalog/src/simcore_service_catalog/db/repositories/services.py index 0f611c932b8..0f9be889f78 100644 --- a/services/catalog/src/simcore_service_catalog/db/repositories/services.py +++ b/services/catalog/src/simcore_service_catalog/db/repositories/services.py @@ -617,5 +617,5 @@ async def get_service_specifications( if merged_specifications := _merge_specs( everyone_specs, teams_specs, primary_specs ): - return ServiceSpecifications.parse_obj(merged_specifications) + return ServiceSpecifications.model_validate(merged_specifications) return None # mypy diff --git a/services/catalog/src/simcore_service_catalog/services/director.py b/services/catalog/src/simcore_service_catalog/services/director.py index 0b48a1f3856..d341b18488c 100644 --- a/services/catalog/src/simcore_service_catalog/services/director.py +++ b/services/catalog/src/simcore_service_catalog/services/director.py @@ -148,7 +148,7 @@ async def get_service( # NOTE: the fact that it returns a list of one element is a defect of the director API assert isinstance(data, list) # nosec assert len(data) == 1 # nosec - return ServiceMetaDataPublished.parse_obj(data[0]) + return ServiceMetaDataPublished.model_validate(data[0]) async def setup_director(app: FastAPI) -> None: diff --git a/services/catalog/src/simcore_service_catalog/services/manifest.py b/services/catalog/src/simcore_service_catalog/services/manifest.py index aa6caf52618..bf7c26a6b63 100644 --- a/services/catalog/src/simcore_service_catalog/services/manifest.py +++ b/services/catalog/src/simcore_service_catalog/services/manifest.py @@ -64,7 +64,7 @@ async def get_services_map( } for service in services_in_registry: try: - service_data = ServiceMetaDataPublished.parse_obj(service) + service_data = ServiceMetaDataPublished.model_validate(service) services[(service_data.key, service_data.version)] = service_data except ValidationError: # noqa: PERF203 diff 
--git a/services/catalog/tests/unit/with_dbs/conftest.py b/services/catalog/tests/unit/with_dbs/conftest.py index 1681622314f..b5284c22259 100644 --- a/services/catalog/tests/unit/with_dbs/conftest.py +++ b/services/catalog/tests/unit/with_dbs/conftest.py @@ -346,7 +346,7 @@ def _fake_factory(**overrides): data = deepcopy(template) data.update(**overrides) - assert ServiceMetaDataPublished.parse_obj( + assert ServiceMetaDataPublished.model_validate( data ), "Invalid fake data. Out of sync!" return data @@ -463,7 +463,7 @@ def _( ): return [ jsonable_encoder( - _Loader.parse_obj( + _Loader.model_validate( { **next(itertools.cycle(expected_director_list_services)), **data[0], # service, **access_rights = data diff --git a/services/catalog/tests/unit/with_dbs/test_api_rest_services__get.py b/services/catalog/tests/unit/with_dbs/test_api_rest_services__get.py index a3c85d3f31b..d4ca2539eb8 100644 --- a/services/catalog/tests/unit/with_dbs/test_api_rest_services__get.py +++ b/services/catalog/tests/unit/with_dbs/test_api_rest_services__get.py @@ -90,7 +90,7 @@ def test_get_service_with_details( assert response.status_code == 200 - got = ServiceGet.parse_obj(response.json()) + got = ServiceGet.model_validate(response.json()) assert got.key == service_key assert got.version == service_version diff --git a/services/catalog/tests/unit/with_dbs/test_api_rest_services_specifications.py b/services/catalog/tests/unit/with_dbs/test_api_rest_services_specifications.py index f8515b57298..c21d8d0f214 100644 --- a/services/catalog/tests/unit/with_dbs/test_api_rest_services_specifications.py +++ b/services/catalog/tests/unit/with_dbs/test_api_rest_services_specifications.py @@ -155,7 +155,7 @@ async def test_get_service_specifications_of_unknown_service_returns_default_spe ).with_query(user_id=user_id) response = client.get(f"{url}") assert response.status_code == status.HTTP_200_OK - service_specs = ServiceSpecificationsGet.parse_obj(response.json()) + service_specs = ServiceSpecificationsGet.model_validate(response.json()) assert service_specs assert ( @@ -200,7 +200,7 @@ async def test_get_service_specifications( # this should now return default specs since there are none in the db response = client.get(f"{url}") assert response.status_code == status.HTTP_200_OK - service_specs = ServiceSpecificationsGet.parse_obj(response.json()) + service_specs = ServiceSpecificationsGet.model_validate(response.json()) assert service_specs assert ( service_specs @@ -215,9 +215,9 @@ async def test_get_service_specifications( await services_specifications_injector(everyone_service_specs) response = client.get(f"{url}") assert response.status_code == status.HTTP_200_OK - service_specs = ServiceSpecificationsGet.parse_obj(response.json()) + service_specs = ServiceSpecificationsGet.model_validate(response.json()) assert service_specs - assert service_specs == ServiceSpecifications.parse_obj( + assert service_specs == ServiceSpecifications.model_validate( everyone_service_specs.dict() ) @@ -228,9 +228,9 @@ async def test_get_service_specifications( await services_specifications_injector(standard_group_service_specs) response = client.get(f"{url}") assert response.status_code == status.HTTP_200_OK - service_specs = ServiceSpecificationsGet.parse_obj(response.json()) + service_specs = ServiceSpecificationsGet.model_validate(response.json()) assert service_specs - assert service_specs == ServiceSpecifications.parse_obj( + assert service_specs == ServiceSpecifications.model_validate( everyone_service_specs.dict() ) @@ -239,9 
+239,9 @@ async def test_get_service_specifications( await conn.execute(user_to_groups.insert().values(uid=user_id, gid=team_gid)) response = client.get(f"{url}") assert response.status_code == status.HTTP_200_OK - service_specs = ServiceSpecificationsGet.parse_obj(response.json()) + service_specs = ServiceSpecificationsGet.model_validate(response.json()) assert service_specs - assert service_specs == ServiceSpecifications.parse_obj( + assert service_specs == ServiceSpecifications.model_validate( standard_group_service_specs.dict() ) @@ -252,9 +252,9 @@ async def test_get_service_specifications( await services_specifications_injector(user_group_service_specs) response = client.get(f"{url}") assert response.status_code == status.HTTP_200_OK - service_specs = ServiceSpecificationsGet.parse_obj(response.json()) + service_specs = ServiceSpecificationsGet.model_validate(response.json()) assert service_specs - assert service_specs == ServiceSpecifications.parse_obj( + assert service_specs == ServiceSpecifications.model_validate( user_group_service_specs.dict() ) @@ -327,7 +327,7 @@ async def test_get_service_specifications_are_passed_to_newer_versions_of_servic ) response = client.get(f"{url}") assert response.status_code == status.HTTP_200_OK - service_specs = ServiceSpecificationsGet.parse_obj(response.json()) + service_specs = ServiceSpecificationsGet.model_validate(response.json()) assert service_specs assert ( service_specs @@ -343,9 +343,9 @@ async def test_get_service_specifications_are_passed_to_newer_versions_of_servic ) response = client.get(f"{url}") assert response.status_code == status.HTTP_200_OK - service_specs = ServiceSpecificationsGet.parse_obj(response.json()) + service_specs = ServiceSpecificationsGet.model_validate(response.json()) assert service_specs - assert service_specs == ServiceSpecifications.parse_obj( + assert service_specs == ServiceSpecifications.model_validate( version_speced[0].dict() ), f"specifications for {version=} are not passed down from {sorted_versions[INDEX_FIRST_SERVICE_VERSION_WITH_SPEC]}" @@ -356,9 +356,9 @@ async def test_get_service_specifications_are_passed_to_newer_versions_of_servic ) response = client.get(f"{url}") assert response.status_code == status.HTTP_200_OK - service_specs = ServiceSpecificationsGet.parse_obj(response.json()) + service_specs = ServiceSpecificationsGet.model_validate(response.json()) assert service_specs - assert service_specs == ServiceSpecifications.parse_obj( + assert service_specs == ServiceSpecifications.model_validate( version_speced[1].dict() ), f"specifications for {version=} are not passed down from {sorted_versions[INDEX_SECOND_SERVICE_VERSION_WITH_SPEC]}" @@ -369,7 +369,7 @@ async def test_get_service_specifications_are_passed_to_newer_versions_of_servic ) response = client.get(f"{url}") assert response.status_code == status.HTTP_200_OK - service_specs = ServiceSpecificationsGet.parse_obj(response.json()) + service_specs = ServiceSpecificationsGet.model_validate(response.json()) assert service_specs if version in versions_with_specs: assert ( diff --git a/services/catalog/tests/unit/with_dbs/test_db_repositories.py b/services/catalog/tests/unit/with_dbs/test_db_repositories.py index 3438492f740..21a603edf74 100644 --- a/services/catalog/tests/unit/with_dbs/test_db_repositories.py +++ b/services/catalog/tests/unit/with_dbs/test_db_repositories.py @@ -109,9 +109,9 @@ async def test_create_services( ) # validation - service = ServiceMetaDataAtDB.parse_obj(fake_service) + service = 
ServiceMetaDataAtDB.model_validate(fake_service) service_access_rights = [ - ServiceAccessRightsAtDB.parse_obj(a) for a in fake_access_rights + ServiceAccessRightsAtDB.model_validate(a) for a in fake_access_rights ] new_service = await services_repo.create_or_update_service( diff --git a/services/catalog/tests/unit/with_dbs/test_services_access_rights.py b/services/catalog/tests/unit/with_dbs/test_services_access_rights.py index d4506855f6d..4affcb83c58 100644 --- a/services/catalog/tests/unit/with_dbs/test_services_access_rights.py +++ b/services/catalog/tests/unit/with_dbs/test_services_access_rights.py @@ -27,7 +27,7 @@ def test_reduce_access_rights(): - sample = ServiceAccessRightsAtDB.parse_obj( + sample = ServiceAccessRightsAtDB.model_validate( { "key": "simcore/services/dynamic/sim4life", "version": "1.0.9", @@ -102,7 +102,7 @@ async def test_auto_upgrade_policy( data["gid"] = everyone_gid mocker.patch( "simcore_service_catalog.services.access_rights.GroupsRepository.get_everyone_group", - return_value=GroupAtDB.parse_obj(data), + return_value=GroupAtDB.model_validate(data), ) mocker.patch( "simcore_service_catalog.services.access_rights.GroupsRepository.get_user_gid_from_email", @@ -111,7 +111,7 @@ async def test_auto_upgrade_policy( # SETUP --- MOST_UPDATED_EXAMPLE = -1 - new_service_metadata = ServiceMetaDataPublished.parse_obj( + new_service_metadata = ServiceMetaDataPublished.model_validate( ServiceMetaDataPublished.Config.schema_extra["examples"][MOST_UPDATED_EXAMPLE] ) new_service_metadata.version = parse_obj_as(ServiceVersion, "1.0.11") diff --git a/services/clusters-keeper/tests/unit/test_api_health.py b/services/clusters-keeper/tests/unit/test_api_health.py index 734620afa1b..a2a14bcf72c 100644 --- a/services/clusters-keeper/tests/unit/test_api_health.py +++ b/services/clusters-keeper/tests/unit/test_api_health.py @@ -40,7 +40,7 @@ async def test_status_no_rabbit( response = await async_client.get("/status") response.raise_for_status() assert response.status_code == status.HTTP_200_OK - status_response = _StatusGet.parse_obj(response.json()) + status_response = _StatusGet.model_validate(response.json()) assert status_response assert status_response.rabbitmq.is_enabled is False @@ -60,7 +60,7 @@ async def test_status( response = await async_client.get("/status") response.raise_for_status() assert response.status_code == status.HTTP_200_OK - status_response = _StatusGet.parse_obj(response.json()) + status_response = _StatusGet.model_validate(response.json()) assert status_response assert status_response.rabbitmq.is_enabled is True @@ -75,7 +75,7 @@ async def test_status( response = await async_client.get("/status") response.raise_for_status() assert response.status_code == status.HTTP_200_OK - status_response = _StatusGet.parse_obj(response.json()) + status_response = _StatusGet.model_validate(response.json()) assert status_response assert status_response.rabbitmq.is_enabled is True diff --git a/services/dask-sidecar/tests/unit/test_tasks.py b/services/dask-sidecar/tests/unit/test_tasks.py index 4aff3a1fd3d..88eaf23c15b 100644 --- a/services/dask-sidecar/tests/unit/test_tasks.py +++ b/services/dask-sidecar/tests/unit/test_tasks.py @@ -198,7 +198,7 @@ def sleeper_task( list_of_files = [file_on_s3_server() for _ in range(NUM_FILES)] # defines the inputs of the task - input_data = TaskInputData.parse_obj( + input_data = TaskInputData.model_validate( { "input_1": 23, "input_23": "a string input", @@ -276,7 +276,7 @@ def sleeper_task( "pytest_bool": False, } output_file_url = 
s3_remote_file_url(file_path="output_file") - expected_output_keys = TaskOutputDataSchema.parse_obj( + expected_output_keys = TaskOutputDataSchema.model_validate( { **( {k: {"required": True} for k in jsonable_outputs} @@ -295,7 +295,7 @@ def sleeper_task( ), } ) - expected_output_data = TaskOutputData.parse_obj( + expected_output_data = TaskOutputData.model_validate( { **( jsonable_outputs @@ -395,10 +395,10 @@ def _creator(command: list[str] | None = None) -> ServiceExampleParam: service_version="latest", command=command or ["/bin/bash", "-c", "echo 'hello I'm an empty ubuntu task!"], - input_data=TaskInputData.parse_obj({}), - output_data_keys=TaskOutputDataSchema.parse_obj({}), + input_data=TaskInputData.model_validate({}), + output_data_keys=TaskOutputDataSchema.model_validate({}), log_file_url=s3_remote_file_url(file_path="log.dat"), - expected_output_data=TaskOutputData.parse_obj({}), + expected_output_data=TaskOutputData.model_validate({}), expected_logs=[], integration_version=integration_version, task_envs={}, diff --git a/services/datcore-adapter/tests/unit/test_route_health.py b/services/datcore-adapter/tests/unit/test_route_health.py index 7ab697612c4..3f0b1712f7e 100644 --- a/services/datcore-adapter/tests/unit/test_route_health.py +++ b/services/datcore-adapter/tests/unit/test_route_health.py @@ -31,7 +31,7 @@ async def test_check_subsystem_health(async_client: httpx.AsyncClient): assert pennsieve_health_route.called assert response.status_code == status.HTTP_200_OK - app_status = AppStatusCheck.parse_obj(response.json()) + app_status = AppStatusCheck.model_validate(response.json()) assert app_status assert app_status.app_name == "simcore-service-datcore-adapter" assert app_status.services == {"pennsieve": True} @@ -43,7 +43,7 @@ async def test_check_subsystem_health(async_client: httpx.AsyncClient): assert pennsieve_health_route.called assert response.status_code == status.HTTP_200_OK - app_status = AppStatusCheck.parse_obj(response.json()) + app_status = AppStatusCheck.model_validate(response.json()) assert app_status assert app_status.app_name == "simcore-service-datcore-adapter" assert app_status.services == {"pennsieve": False} diff --git a/services/director-v2/src/simcore_service_director_v2/api/routes/dynamic_services.py b/services/director-v2/src/simcore_service_director_v2/api/routes/dynamic_services.py index a2a99f4bea3..e688112cb75 100644 --- a/services/director-v2/src/simcore_service_director_v2/api/routes/dynamic_services.py +++ b/services/director-v2/src/simcore_service_director_v2/api/routes/dynamic_services.py @@ -287,7 +287,7 @@ async def service_retrieve_data_on_ports( ) # validate and return - return RetrieveDataOutEnveloped.parse_obj(response.json()) + return RetrieveDataOutEnveloped.model_validate(response.json()) @router.post( diff --git a/services/director-v2/src/simcore_service_director_v2/models/dynamic_services_scheduler.py b/services/director-v2/src/simcore_service_director_v2/models/dynamic_services_scheduler.py index 33272e9f946..c5e42263fe4 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/dynamic_services_scheduler.py +++ b/services/director-v2/src/simcore_service_director_v2/models/dynamic_services_scheduler.py @@ -499,7 +499,7 @@ def from_http_request( } if run_id: obj_dict["run_id"] = run_id - return cls.parse_obj(obj_dict) + return cls.model_validate(obj_dict) @validator("user_preferences_path", pre=True) @classmethod diff --git 
a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_utils.py b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_utils.py index a33f689e9da..012a059fc62 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_utils.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_utils.py @@ -105,7 +105,7 @@ def _compute_node_requirements( node_defined_resources[resource_name] = node_defined_resources.get( resource_name, 0 ) + min(resource_value.limit, resource_value.reservation) - return NodeRequirements.parse_obj(node_defined_resources) + return NodeRequirements.model_validate(node_defined_resources) def _compute_node_boot_mode(node_resources: ServiceResourcesDict) -> BootMode: @@ -187,7 +187,7 @@ async def _generate_task_image( data.update(envs=_compute_node_envs(node_labels)) if node_extras and node_extras.container_spec: data.update(command=node_extras.container_spec.command) - return Image.parse_obj(data) + return Image.model_validate(data) async def _get_pricing_and_hardware_infos( @@ -430,7 +430,7 @@ async def generate_tasks_list_from_project( task_db = CompTaskAtDB( project_id=project.uuid, node_id=NodeID(node_id), - schema=NodeSchema.parse_obj( + schema=NodeSchema.model_validate( node_details.dict( exclude_unset=True, by_alias=True, include={"inputs", "outputs"} ) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/projects_networks.py b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/projects_networks.py index fe033761128..172c577ee56 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/projects_networks.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/projects_networks.py @@ -28,7 +28,7 @@ async def get_projects_networks(self, project_id: ProjectID) -> ProjectsNetworks async def upsert_projects_networks( self, project_id: ProjectID, networks_with_aliases: NetworksWithAliases ) -> None: - projects_networks_to_insert = ProjectsNetworks.parse_obj( + projects_networks_to_insert = ProjectsNetworks.model_validate( {"project_uuid": project_id, "networks_with_aliases": networks_with_aliases} ) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/user_preferences_frontend.py b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/user_preferences_frontend.py index 01d7fdcce61..0ce81c14bb6 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/user_preferences_frontend.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/user_preferences_frontend.py @@ -31,5 +31,5 @@ async def get_user_preference( return ( None if preference_payload is None - else preference_class.parse_obj(preference_payload) + else preference_class.model_validate(preference_payload) ) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/director_v0.py b/services/director-v2/src/simcore_service_director_v2/modules/director_v0.py index 0bc8c799dcb..322e5281e46 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/director_v0.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/director_v0.py @@ -83,7 +83,7 @@ async def get_service_extras( f"/service_extras/{urllib.parse.quote_plus(service_key)}/{service_version}", ) if resp.status_code == 
status.HTTP_200_OK: - return ServiceExtras.parse_obj(unenvelope_or_raise_error(resp)) + return ServiceExtras.model_validate(unenvelope_or_raise_error(resp)) raise HTTPException(status_code=resp.status_code, detail=resp.content) @log_decorator(logger=logger) @@ -94,7 +94,7 @@ async def get_running_service_details( "GET", f"running_interactive_services/{service_uuid}" ) if resp.status_code == status.HTTP_200_OK: - return RunningDynamicServiceDetails.parse_obj( + return RunningDynamicServiceDetails.model_validate( unenvelope_or_raise_error(resp) ) raise HTTPException(status_code=resp.status_code, detail=resp.content) @@ -109,7 +109,7 @@ async def get_service_labels( ) resp.raise_for_status() if resp.status_code == status.HTTP_200_OK: - return SimcoreServiceLabels.parse_obj(unenvelope_or_raise_error(resp)) + return SimcoreServiceLabels.model_validate(unenvelope_or_raise_error(resp)) raise HTTPException(status_code=resp.status_code, detail=resp.content) @log_decorator(logger=logger) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_public.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_public.py index 190a35a315c..8eac1dfd2aa 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_public.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_public.py @@ -489,7 +489,9 @@ async def get_service_activity( dynamic_sidecar_endpoint ) decoded_response = response.json() - return ActivityInfo.parse_obj(decoded_response) if decoded_response else None + return ( + ActivityInfo.model_validate(decoded_response) if decoded_response else None + ) async def free_reserved_disk_space( self, dynamic_sidecar_endpoint: AnyHttpUrl diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/settings.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/settings.py index 5100d63bab0..f7c842747f4 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/settings.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/settings.py @@ -304,7 +304,7 @@ def _merge_resources_in_settings( # merge all resources empty_resource_entry: SimcoreServiceSettingLabelEntry = ( - SimcoreServiceSettingLabelEntry.parse_obj( + SimcoreServiceSettingLabelEntry.model_validate( { "name": "Resources", "type": "Resources", @@ -406,7 +406,7 @@ def _get_boot_options( boot_options = json.loads(boot_options_encoded)["boot-options"] log.debug("got boot_options=%s", boot_options) - return {k: BootOption.parse_obj(v) for k, v in boot_options.items()} + return {k: BootOption.model_validate(v) for k, v in boot_options.items()} def _assemble_env_vars_for_boot_options( @@ -511,7 +511,7 @@ async def merge_settings_before_use( ) settings = _patch_target_service_into_env_vars(settings) - return SimcoreServiceSettingsLabel.parse_obj(settings) + return SimcoreServiceSettingsLabel.model_validate(settings) __all__ = ["merge_settings_before_use", "update_service_params_from_settings"] diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/sidecar.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/sidecar.py index 78b66242355..f317b02f257 100644 --- 
a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/sidecar.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/sidecar.py @@ -561,4 +561,4 @@ async def get_dynamic_sidecar_spec( # pylint:disable=too-many-arguments# noqa: create_service_params=create_service_params, ) - return AioDockerServiceSpec.parse_obj(create_service_params) + return AioDockerServiceSpec.model_validate(create_service_params) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/volume_remover.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/volume_remover.py index cefbe0156ec..d1022618f09 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/volume_remover.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/volume_remover.py @@ -182,4 +182,4 @@ def spec_volume_removal_service( }, }, } - return AioDockerServiceSpec.parse_obj(create_service_params) + return AioDockerServiceSpec.model_validate(create_service_params) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_event_create_sidecars.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_event_create_sidecars.py index 6943f7a0852..88360f8e897 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_event_create_sidecars.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_event_create_sidecars.py @@ -247,12 +247,12 @@ async def action(cls, app: FastAPI, scheduler_data: SchedulerData) -> None: scheduler_data.user_id, scheduler_data.key, scheduler_data.version ) ).get("sidecar", {}) or {} - user_specific_service_spec = AioDockerServiceSpec.parse_obj( + user_specific_service_spec = AioDockerServiceSpec.model_validate( user_specific_service_spec ) # NOTE: since user_specific_service_spec follows Docker Service Spec and not Aio # we do not use aliases when exporting dynamic_sidecar_service_spec_base - dynamic_sidecar_service_final_spec = AioDockerServiceSpec.parse_obj( + dynamic_sidecar_service_final_spec = AioDockerServiceSpec.model_validate( nested_update( jsonable_encoder(dynamic_sidecar_service_spec_base, exclude_unset=True), jsonable_encoder(user_specific_service_spec, exclude_unset=True), diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_events_utils.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_events_utils.py index 6fc6357ec3c..d241aecbb2f 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_events_utils.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_events_utils.py @@ -509,7 +509,7 @@ async def get_allow_metrics_collection( bool, AllowMetricsCollectionFrontendUserPreference.get_default_value() ) - allow_metrics_collection = AllowMetricsCollectionFrontendUserPreference.parse_obj( - preference + allow_metrics_collection = ( + AllowMetricsCollectionFrontendUserPreference.model_validate(preference) ) return allow_metrics_collection.value diff --git 
a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_scheduler_utils.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_scheduler_utils.py index 7438e9d996f..a1f747263c4 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_scheduler_utils.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_scheduler_utils.py @@ -107,7 +107,7 @@ def create_model_from_scheduler_data( service_state: ServiceState, service_message: str, ) -> RunningDynamicServiceDetails: - return RunningDynamicServiceDetails.parse_obj( + return RunningDynamicServiceDetails.model_validate( { "boot_type": ServiceBootType.V2, "user_id": scheduler_data.user_id, diff --git a/services/director-v2/src/simcore_service_director_v2/modules/osparc_variables/_api_auth_rpc.py b/services/director-v2/src/simcore_service_director_v2/modules/osparc_variables/_api_auth_rpc.py index d86f09ec9c2..9f7eab202bc 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/osparc_variables/_api_auth_rpc.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/osparc_variables/_api_auth_rpc.py @@ -32,4 +32,4 @@ async def get_or_create_api_key_and_secret( name=name, expiration=expiration, ) - return ApiKeyGet.parse_obj(result) + return ApiKeyGet.model_validate(result) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/projects_networks.py b/services/director-v2/src/simcore_service_director_v2/modules/projects_networks.py index cba005a92ae..2d515d4c37f 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/projects_networks.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/projects_networks.py @@ -64,7 +64,7 @@ async def requires_dynamic_sidecar( simcore_service_labels: SimcoreServiceLabels = ( await director_v0_client.get_service_labels( - service=ServiceKeyVersion.parse_obj( + service=ServiceKeyVersion.model_validate( {"key": decoded_service_key, "version": service_version} ) ) @@ -184,10 +184,12 @@ async def _get_networks_with_aliases_for_default_network( be on the same network. 
Return an updated version of the projects_networks """ - new_networks_with_aliases: NetworksWithAliases = NetworksWithAliases.parse_obj({}) + new_networks_with_aliases: NetworksWithAliases = NetworksWithAliases.model_validate( + {} + ) default_network = _network_name(project_id, "default") - new_networks_with_aliases[default_network] = ContainerAliases.parse_obj({}) + new_networks_with_aliases[default_network] = ContainerAliases.model_validate({}) for node_uuid, node_content in new_workbench.items(): # only add dynamic-sidecar nodes @@ -248,7 +250,7 @@ async def update_from_workbench( ) ) except ProjectNetworkNotFoundError: - existing_projects_networks = ProjectsNetworks.parse_obj( + existing_projects_networks = ProjectsNetworks.model_validate( {"project_uuid": project_id, "networks_with_aliases": {}} ) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/storage.py b/services/director-v2/src/simcore_service_director_v2/modules/storage.py index 98e18845333..b0cbb5e9629 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/storage.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/storage.py @@ -72,5 +72,5 @@ async def get_s3_access(self, user_id: UserID) -> S3Settings: ) resp.raise_for_status() if resp.status_code == status.HTTP_200_OK: - return S3Settings.parse_obj(unenvelope_or_raise_error(resp)) + return S3Settings.model_validate(unenvelope_or_raise_error(resp)) raise HTTPException(status_code=resp.status_code, detail=resp.content) diff --git a/services/director-v2/src/simcore_service_director_v2/utils/dask.py b/services/director-v2/src/simcore_service_director_v2/utils/dask.py index c6229bacdc8..109af07734b 100644 --- a/services/director-v2/src/simcore_service_director_v2/utils/dask.py +++ b/services/director-v2/src/simcore_service_director_v2/utils/dask.py @@ -229,7 +229,7 @@ async def compute_input_data( if ports_errors: raise PortsValidationError(project_id, node_id, ports_errors) - return TaskInputData.parse_obj(input_data) + return TaskInputData.model_validate(input_data) async def compute_output_data_schema( @@ -276,7 +276,7 @@ async def compute_output_data_schema( } ) - return TaskOutputDataSchema.parse_obj(output_data_schema) + return TaskOutputDataSchema.model_validate(output_data_schema) _LOGS_FILE_NAME = "logs.zip" diff --git a/services/director-v2/tests/conftest.py b/services/director-v2/tests/conftest.py index eafe6bb15fc..b69de758685 100644 --- a/services/director-v2/tests/conftest.py +++ b/services/director-v2/tests/conftest.py @@ -237,7 +237,7 @@ def fake_workbench(fake_workbench_file: Path) -> NodesDict: workbench_dict = json.loads(fake_workbench_file.read_text()) workbench = {} for node_id, node_data in workbench_dict.items(): - workbench[node_id] = Node.parse_obj(node_data) + workbench[node_id] = Node.model_validate(node_data) return workbench @@ -334,7 +334,7 @@ async def wrapper(*args, **kwargs): @pytest.fixture def mock_osparc_variables_api_auth_rpc(mocker: MockerFixture) -> None: - fake_data = ApiKeyGet.parse_obj(ApiKeyGet.Config.schema_extra["examples"][0]) + fake_data = ApiKeyGet.model_validate(ApiKeyGet.Config.schema_extra["examples"][0]) async def _create( app: FastAPI, diff --git a/services/director-v2/tests/helpers/shared_comp_utils.py b/services/director-v2/tests/helpers/shared_comp_utils.py index ad7185e8fa7..5670ffc5de8 100644 --- a/services/director-v2/tests/helpers/shared_comp_utils.py +++ b/services/director-v2/tests/helpers/shared_comp_utils.py @@ -68,7 +68,7 @@ async def 
check_pipeline_state() -> ComputationGet: assert ( response.status_code == status.HTTP_200_OK ), f"response code is {response.status_code}, error: {response.text}" - task_out = ComputationGet.parse_obj(response.json()) + task_out = ComputationGet.model_validate(response.json()) assert task_out.id == project_uuid assert task_out.url.path == f"/v2/computations/{project_uuid}" print( diff --git a/services/director-v2/tests/integration/01/test_computation_api.py b/services/director-v2/tests/integration/01/test_computation_api.py index 110dbd5f89b..bce13680e29 100644 --- a/services/director-v2/tests/integration/01/test_computation_api.py +++ b/services/director-v2/tests/integration/01/test_computation_api.py @@ -107,7 +107,7 @@ def fake_workbench_computational_pipeline_details( ) -> PipelineDetails: adjacency_list = json.loads(fake_workbench_computational_adjacency_file.read_text()) node_states = json.loads(fake_workbench_node_states_file.read_text()) - return PipelineDetails.parse_obj( + return PipelineDetails.model_validate( {"adjacency_list": adjacency_list, "node_states": node_states, "progress": 0} ) @@ -718,7 +718,7 @@ async def test_abort_computation( assert ( response.status_code == status.HTTP_202_ACCEPTED ), f"response code is {response.status_code}, error: {response.text}" - task_out = ComputationGet.parse_obj(response.json()) + task_out = ComputationGet.model_validate(response.json()) assert task_out.url.path == f"/v2/computations/{sleepers_project.uuid}:stop" assert task_out.stop_url is None diff --git a/services/director-v2/tests/integration/02/conftest.py b/services/director-v2/tests/integration/02/conftest.py index 0d0df8a402f..ed80fc0fce7 100644 --- a/services/director-v2/tests/integration/02/conftest.py +++ b/services/director-v2/tests/integration/02/conftest.py @@ -64,7 +64,7 @@ def mock_projects_networks_repository(mocker: MockerFixture) -> None: "simcore_service_director_v2.modules.db.repositories." 
"projects_networks.ProjectsNetworksRepository.get_projects_networks" ), - return_value=ProjectsNetworks.parse_obj( + return_value=ProjectsNetworks.model_validate( {"project_uuid": uuid4(), "networks_with_aliases": {}} ), ) @@ -81,7 +81,7 @@ def service_resources() -> ServiceResourcesDict: @pytest.fixture def mock_resource_usage_tracker(mocker: MockerFixture) -> None: base_module = "simcore_service_director_v2.modules.resource_usage_tracker_client" - service_pricing_plan = PricingPlanGet.parse_obj( + service_pricing_plan = PricingPlanGet.model_validate( PricingPlanGet.Config.schema_extra["examples"][1] ) for unit in service_pricing_plan.pricing_units: diff --git a/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py b/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py index 2fe09c42286..62dc63e0c47 100644 --- a/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py +++ b/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py @@ -442,13 +442,13 @@ async def projects_networks_db( # NOTE: director-v2 does not have access to the webserver which creates this # injecting all dynamic-sidecar started services on a default networks - container_aliases: ContainerAliases = ContainerAliases.parse_obj({}) + container_aliases: ContainerAliases = ContainerAliases.model_validate({}) for k, (node_uuid, node) in enumerate(current_study.workbench.items()): if not is_legacy(node): container_aliases[node_uuid] = f"networkable_alias_{k}" - networks_with_aliases: NetworksWithAliases = NetworksWithAliases.parse_obj({}) + networks_with_aliases: NetworksWithAliases = NetworksWithAliases.model_validate({}) default_network_name = f"{PROJECT_NETWORK_PREFIX}_{current_study.uuid}_test" networks_with_aliases[default_network_name] = container_aliases @@ -968,7 +968,7 @@ async def test_nodeports_integration( task_out, project=current_study, exp_task_state=RunningState.SUCCESS, - exp_pipeline_details=PipelineDetails.parse_obj(fake_dy_success), + exp_pipeline_details=PipelineDetails.model_validate(fake_dy_success), iteration=1, cluster_id=DEFAULT_CLUSTER_ID, ) diff --git a/services/director-v2/tests/integration/conftest.py b/services/director-v2/tests/integration/conftest.py index 0e6f8632094..424cb128257 100644 --- a/services/director-v2/tests/integration/conftest.py +++ b/services/director-v2/tests/integration/conftest.py @@ -98,7 +98,7 @@ async def _creator( response.raise_for_status() assert response.status_code == status.HTTP_201_CREATED - computation_task = ComputationGet.parse_obj(response.json()) + computation_task = ComputationGet.model_validate(response.json()) created_comp_tasks.append((user_id, computation_task)) return computation_task diff --git a/services/director-v2/tests/unit/conftest.py b/services/director-v2/tests/unit/conftest.py index f08ffd47337..4dc385388cf 100644 --- a/services/director-v2/tests/unit/conftest.py +++ b/services/director-v2/tests/unit/conftest.py @@ -56,7 +56,7 @@ def simcore_services_network_name() -> str: @pytest.fixture def simcore_service_labels() -> SimcoreServiceLabels: - simcore_service_labels = SimcoreServiceLabels.parse_obj( + simcore_service_labels = SimcoreServiceLabels.model_validate( SimcoreServiceLabels.Config.schema_extra["examples"][1] ) simcore_service_labels.callbacks_mapping = parse_obj_as(CallbacksMapping, {}) @@ -65,7 +65,7 @@ def simcore_service_labels() -> SimcoreServiceLabels: @pytest.fixture def dynamic_service_create() -> 
DynamicServiceCreate: - return DynamicServiceCreate.parse_obj( + return DynamicServiceCreate.model_validate( DynamicServiceCreate.Config.schema_extra["example"] ) @@ -225,7 +225,7 @@ def fake_service_specifications(faker: Faker) -> dict[str, Any]: # the service specifications follow the Docker service creation available # https://docs.docker.com/engine/api/v1.41/#operation/ServiceCreate return { - "sidecar": DockerServiceSpec.parse_obj( + "sidecar": DockerServiceSpec.model_validate( { "Labels": {"label_one": faker.pystr(), "label_two": faker.pystr()}, "TaskTemplate": { diff --git a/services/director-v2/tests/unit/test_models_dynamic_services.py b/services/director-v2/tests/unit/test_models_dynamic_services.py index dd0df8a0eed..e28cd51ca7f 100644 --- a/services/director-v2/tests/unit/test_models_dynamic_services.py +++ b/services/director-v2/tests/unit/test_models_dynamic_services.py @@ -219,7 +219,7 @@ def test_regression_legacy_service_compatibility() -> None: "user_id": "1", "project_id": "b1ec5c8e-f5bb-11eb-b1d5-02420a000006", } - service_details = RunningDynamicServiceDetails.parse_obj(api_response) + service_details = RunningDynamicServiceDetails.model_validate(api_response) assert service_details diff --git a/services/director-v2/tests/unit/test_modules_dask_client.py b/services/director-v2/tests/unit/test_modules_dask_client.py index a01980027c0..16ebc158313 100644 --- a/services/director-v2/tests/unit/test_modules_dask_client.py +++ b/services/director-v2/tests/unit/test_modules_dask_client.py @@ -372,11 +372,11 @@ def _mocked_node_ports(mocker: MockerFixture) -> None: mocker.patch( "simcore_service_director_v2.modules.dask_client.dask_utils.compute_input_data", - return_value=TaskInputData.parse_obj({}), + return_value=TaskInputData.model_validate({}), ) mocker.patch( "simcore_service_director_v2.modules.dask_client.dask_utils.compute_output_data_schema", - return_value=TaskOutputDataSchema.parse_obj({}), + return_value=TaskOutputDataSchema.model_validate({}), ) mocker.patch( "simcore_service_director_v2.modules.dask_client.dask_utils.compute_service_log_file_upload_link", @@ -487,7 +487,7 @@ def task_labels(comp_run_metadata: RunMetadataDict) -> ContainerLabelsDict: @pytest.fixture def hardware_info() -> HardwareInfo: - return HardwareInfo.parse_obj(HardwareInfo.Config.schema_extra["examples"][0]) + return HardwareInfo.model_validate(HardwareInfo.Config.schema_extra["examples"][0]) @pytest.fixture @@ -534,7 +534,7 @@ def fake_sidecar_fct( event = distributed.Event(_DASK_EVENT_NAME) event.wait(timeout=25) - return TaskOutputData.parse_obj({"some_output_key": 123}) + return TaskOutputData.model_validate({"some_output_key": 123}) # NOTE: We pass another fct so it can run in our localy created dask cluster # NOTE2: since there is only 1 task here, it's ok to pass the nodeID @@ -650,7 +650,7 @@ def fake_sidecar_fct( task = worker.state.tasks.get(worker.get_current_task()) assert task is not None - return TaskOutputData.parse_obj({"some_output_key": 123}) + return TaskOutputData.model_validate({"some_output_key": 123}) # NOTE: We pass another fct so it can run in our localy created dask cluster published_computation_task = await dask_client.send_computation_tasks( @@ -742,7 +742,7 @@ def fake_remote_fct( print("--> raising cancellation error now") raise TaskCancelledError - return TaskOutputData.parse_obj({"some_output_key": 123}) + return TaskOutputData.model_validate({"some_output_key": 123}) published_computation_task = await dask_client.send_computation_tasks( user_id=user_id, @@ 
-1088,7 +1088,7 @@ def fake_remote_fct( if fail_remote_fct: err_msg = "We fail because we're told to!" raise ValueError(err_msg) - return TaskOutputData.parse_obj({"some_output_key": 123}) + return TaskOutputData.model_validate({"some_output_key": 123}) published_computation_task = await dask_client.send_computation_tasks( user_id=user_id, @@ -1179,7 +1179,7 @@ def fake_remote_fct( published_event = Event(name=_DASK_START_EVENT) published_event.set() - return TaskOutputData.parse_obj({"some_output_key": 123}) + return TaskOutputData.model_validate({"some_output_key": 123}) # run the computation published_computation_task = await dask_client.send_computation_tasks( @@ -1255,7 +1255,7 @@ def fake_sidecar_fct( event = distributed.Event(_DASK_EVENT_NAME) event.wait(timeout=25) - return TaskOutputData.parse_obj({"some_output_key": 123}) + return TaskOutputData.model_validate({"some_output_key": 123}) # NOTE: We pass another fct so it can run in our localy created dask cluster published_computation_task = await dask_client.send_computation_tasks( diff --git a/services/director-v2/tests/unit/test_modules_dask_clients_pool.py b/services/director-v2/tests/unit/test_modules_dask_clients_pool.py index 3dd97cc4753..f9e8b1f13b7 100644 --- a/services/director-v2/tests/unit/test_modules_dask_clients_pool.py +++ b/services/director-v2/tests/unit/test_modules_dask_clients_pool.py @@ -87,7 +87,7 @@ def creator(num_clusters: int) -> list[Cluster]: fake_clusters = [] for n in range(num_clusters): fake_clusters.append( - Cluster.parse_obj( + Cluster.model_validate( { "id": faker.pyint(), "name": faker.name(), diff --git a/services/director-v2/tests/unit/test_modules_project_networks.py b/services/director-v2/tests/unit/test_modules_project_networks.py index 585d8131b8e..848b3629e10 100644 --- a/services/director-v2/tests/unit/test_modules_project_networks.py +++ b/services/director-v2/tests/unit/test_modules_project_networks.py @@ -40,8 +40,8 @@ def using( attach: list[Any], ) -> "Example": return cls( - existing_networks_with_aliases=NetworksWithAliases.parse_obj(existing), - new_networks_with_aliases=NetworksWithAliases.parse_obj(new), + existing_networks_with_aliases=NetworksWithAliases.model_validate(existing), + new_networks_with_aliases=NetworksWithAliases.model_validate(new), expected_calls=MockedCalls(detach=detach, attach=attach), ) @@ -184,7 +184,7 @@ def dy_workbench_with_networkable_labels(mocks_dir: Path) -> NodesDict: for node_uuid, node_data in dy_workbench.items(): node_data["label"] = f"label_{uuid4()}" - parsed_workbench[node_uuid] = Node.parse_obj(node_data) + parsed_workbench[node_uuid] = Node.model_validate(node_data) return parsed_workbench diff --git a/services/director-v2/tests/unit/test_utils_comp_scheduler.py b/services/director-v2/tests/unit/test_utils_comp_scheduler.py index 970cdad75b7..fd1ecc5022c 100644 --- a/services/director-v2/tests/unit/test_utils_comp_scheduler.py +++ b/services/director-v2/tests/unit/test_utils_comp_scheduler.py @@ -78,7 +78,7 @@ def test_get_resource_tracking_run_id( @pytest.mark.parametrize( "task", [ - CompTaskAtDB.parse_obj(example) + CompTaskAtDB.model_validate(example) for example in CompTaskAtDB.Config.schema_extra["examples"] ], ids=str, diff --git a/services/director-v2/tests/unit/with_dbs/conftest.py b/services/director-v2/tests/unit/with_dbs/conftest.py index 09b727449f2..4094ce65b5d 100644 --- a/services/director-v2/tests/unit/with_dbs/conftest.py +++ b/services/director-v2/tests/unit/with_dbs/conftest.py @@ -195,7 +195,7 @@ def creator(user: 
dict[str, Any], **cluster_kwargs) -> Cluster: cluster_config = Cluster.Config.schema_extra["examples"][1] cluster_config["owner"] = user["primary_gid"] cluster_config.update(**cluster_kwargs) - new_cluster = Cluster.parse_obj(cluster_config) + new_cluster = Cluster.model_validate(cluster_config) assert new_cluster with postgres_db.connect() as conn: diff --git a/services/director-v2/tests/unit/with_dbs/test_api_route_clusters.py b/services/director-v2/tests/unit/with_dbs/test_api_route_clusters.py index 63457484613..dcdee3e5a6f 100644 --- a/services/director-v2/tests/unit/with_dbs/test_api_route_clusters.py +++ b/services/director-v2/tests/unit/with_dbs/test_api_route_clusters.py @@ -70,7 +70,7 @@ def creator() -> dict[str, Any]: "username": faker.user_name(), "password": faker.password(), } - assert SimpleAuthentication.parse_obj(simple_auth) + assert SimpleAuthentication.model_validate(simple_auth) return simple_auth return creator @@ -430,7 +430,7 @@ async def test_update_own_cluster( json=cluster_patch.dict(**_PATCH_EXPORT), ) assert response.status_code == status.HTTP_200_OK, f"received {response.text}" - returned_cluster = ClusterGet.parse_obj(response.json()) + returned_cluster = ClusterGet.model_validate(response.json()) expected_modified_cluster.access_rights[user_2["primary_gid"]] = rights assert returned_cluster.dict( @@ -447,7 +447,7 @@ async def test_update_own_cluster( ), ) assert response.status_code == status.HTTP_200_OK, f"received {response.text}" - returned_cluster = ClusterGet.parse_obj(response.json()) + returned_cluster = ClusterGet.model_validate(response.json()) expected_modified_cluster.owner = user_2["primary_gid"] expected_modified_cluster.access_rights[ user_2["primary_gid"] diff --git a/services/director-v2/tests/unit/with_dbs/test_api_route_clusters_details.py b/services/director-v2/tests/unit/with_dbs/test_api_route_clusters_details.py index 2b509ab1a6f..e9394aba278 100644 --- a/services/director-v2/tests/unit/with_dbs/test_api_route_clusters_details.py +++ b/services/director-v2/tests/unit/with_dbs/test_api_route_clusters_details.py @@ -114,12 +114,12 @@ async def test_get_default_cluster_details( f"/v2/clusters/default/details?user_id={user_1['id']}" ) assert response.status_code == status.HTTP_200_OK - default_cluster_out = ClusterDetailsGet.parse_obj(response.json()) + default_cluster_out = ClusterDetailsGet.model_validate(response.json()) response = await async_client.get( f"/v2/clusters/{0}/details?user_id={user_1['id']}" ) assert response.status_code == status.HTTP_200_OK - assert default_cluster_out == ClusterDetailsGet.parse_obj(response.json()) + assert default_cluster_out == ClusterDetailsGet.model_validate(response.json()) async def _get_cluster_details( @@ -130,7 +130,7 @@ async def _get_cluster_details( ) assert response.status_code == status.HTTP_200_OK print(f"<-- received cluster details response {response=}") - cluster_out = ClusterDetailsGet.parse_obj(response.json()) + cluster_out = ClusterDetailsGet.model_validate(response.json()) assert cluster_out print(f"<-- received cluster details {cluster_out=}") assert cluster_out.scheduler, "the cluster's scheduler is not started!" 
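Note: every hunk in this series applies the same mechanical rename from the Pydantic v1 validation API to its v2 equivalent. The standalone sketch below only illustrates that mapping; the Movie model and the sample payload are hypothetical and do not exist in this repository.

# Minimal sketch of the Pydantic v1 -> v2 renames used throughout this patch.
# Hypothetical model and payload, for illustration only (not osparc-simcore code).
from pydantic import BaseModel, TypeAdapter


class Movie(BaseModel):
    title: str
    year: int


payload = {"title": "Metropolis", "year": 1927}

# Pydantic v1 spelling (the removed "-" lines):
#   movie = Movie.parse_obj(payload)
#   movie = Movie.parse_raw('{"title": "Metropolis", "year": 1927}')
#   movies = parse_obj_as(list[Movie], [payload])

# Pydantic v2 spelling (the added "+" lines):
movie = Movie.model_validate(payload)  # parse_obj -> model_validate
movie = Movie.model_validate_json(
    '{"title": "Metropolis", "year": 1927}'
)  # parse_raw -> model_validate_json
movies = TypeAdapter(list[Movie]).validate_python(
    [payload]
)  # parse_obj_as -> TypeAdapter(...).validate_python

assert movie.year == 1927
assert movies[0].title == "Metropolis"

Other v1 constructs still visible in unchanged context lines (parse_obj_as helpers, Model.Config.schema_extra["examples"], .dict() exports) have v2 counterparts (TypeAdapter, model_config = ConfigDict(json_schema_extra=...), .model_dump()); those renames are presumably handled in later patches of this series.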
diff --git a/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py b/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py index 7fe67666267..d64d3212ccf 100644 --- a/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py +++ b/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py @@ -861,7 +861,7 @@ async def test_get_computation_from_empty_project( ) response = await async_client.get(get_computation_url) assert response.status_code == status.HTTP_200_OK, response.text - returned_computation = ComputationGet.parse_obj(response.json()) + returned_computation = ComputationGet.model_validate(response.json()) assert returned_computation expected_computation = ComputationGet( id=proj.uuid, @@ -910,7 +910,7 @@ async def test_get_computation_from_not_started_computation_task( comp_tasks = tasks(user=user, project=proj) response = await async_client.get(get_computation_url) assert response.status_code == status.HTTP_200_OK, response.text - returned_computation = ComputationGet.parse_obj(response.json()) + returned_computation = ComputationGet.model_validate(response.json()) assert returned_computation expected_computation = ComputationGet( id=proj.uuid, @@ -980,7 +980,7 @@ async def test_get_computation_from_published_computation_task( ) response = await async_client.get(get_computation_url) assert response.status_code == status.HTTP_200_OK, response.text - returned_computation = ComputationGet.parse_obj(response.json()) + returned_computation = ComputationGet.model_validate(response.json()) assert returned_computation expected_stop_url = async_client.base_url.join( f"/v2/computations/{proj.uuid}:stop?user_id={user['id']}" diff --git a/services/director-v2/tests/unit/with_dbs/test_api_route_dynamic_services.py b/services/director-v2/tests/unit/with_dbs/test_api_route_dynamic_services.py index 4ddb656a0b2..c726c6d2f9c 100644 --- a/services/director-v2/tests/unit/with_dbs/test_api_route_dynamic_services.py +++ b/services/director-v2/tests/unit/with_dbs/test_api_route_dynamic_services.py @@ -160,7 +160,7 @@ async def mock_retrieve_features( assert_all_mocked=True, ) as respx_mock: if is_legacy: - service_details = RunningDynamicServiceDetails.parse_obj( + service_details = RunningDynamicServiceDetails.model_validate( RunningDynamicServiceDetails.Config.schema_extra["examples"][0] ) respx_mock.post( @@ -244,7 +244,7 @@ def get_stack_status(node_uuid: NodeID) -> RunningDynamicServiceDetails: if exp_status_code == status.HTTP_307_TEMPORARY_REDIRECT: raise DynamicSidecarNotFoundError(node_uuid) - return RunningDynamicServiceDetails.parse_obj( + return RunningDynamicServiceDetails.model_validate( RunningDynamicServiceDetails.Config.schema_extra["examples"][0] ) @@ -314,7 +314,7 @@ def test_create_dynamic_services( exp_status_code: int, is_legacy: bool, ): - post_data = DynamicServiceCreate.parse_obj(service) + post_data = DynamicServiceCreate.model_validate(service) response = client.post( "/v2/dynamic_services", @@ -498,7 +498,7 @@ def test_delete_service_waiting_for_manual_intervention( is_legacy: bool, dynamic_sidecar_scheduler: DynamicSidecarsScheduler, ): - post_data = DynamicServiceCreate.parse_obj(service) + post_data = DynamicServiceCreate.model_validate(service) response = client.post( "/v2/dynamic_services", diff --git a/services/director-v2/tests/unit/with_dbs/test_cli.py b/services/director-v2/tests/unit/with_dbs/test_cli.py index 0322610985c..d54008a7abc 100644 --- a/services/director-v2/tests/unit/with_dbs/test_cli.py 
+++ b/services/director-v2/tests/unit/with_dbs/test_cli.py @@ -105,7 +105,7 @@ def node_id(faker: Faker) -> NodeID: def mock_get_node_state(mocker: MockerFixture) -> None: mocker.patch( "simcore_service_director_v2.cli._core._get_dy_service_state", - return_value=DynamicServiceGet.parse_obj( + return_value=DynamicServiceGet.model_validate( RunningDynamicServiceDetails.Config.schema_extra["examples"][0] ), ) diff --git a/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py b/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py index 2968e96e5db..94916ba205f 100644 --- a/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py +++ b/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py @@ -122,7 +122,7 @@ async def _assert_comp_run_db( & (comp_runs.c.project_uuid == f"{pub_project.project.uuid}") ) # there is only one entry ) - run_entry = CompRunsAtDB.parse_obj(await result.first()) + run_entry = CompRunsAtDB.model_validate(await result.first()) assert ( run_entry.result == expected_state ), f"comp_runs: expected state '{expected_state}, found '{run_entry.result}'" @@ -365,7 +365,7 @@ async def test_misconfigured_pipeline_is_not_scheduled( & (comp_runs.c.project_uuid == f"{sleepers_project.uuid}") ) # there is only one entry ) - run_entry = CompRunsAtDB.parse_obj(await result.first()) + run_entry = CompRunsAtDB.model_validate(await result.first()) assert run_entry.result == RunningState.PUBLISHED # let the scheduler kick in await run_comp_scheduler(scheduler) @@ -379,7 +379,7 @@ async def test_misconfigured_pipeline_is_not_scheduled( & (comp_runs.c.project_uuid == f"{sleepers_project.uuid}") ) # there is only one entry ) - run_entry = CompRunsAtDB.parse_obj(await result.first()) + run_entry = CompRunsAtDB.model_validate(await result.first()) assert run_entry.result == RunningState.ABORTED @@ -760,7 +760,7 @@ async def _return_1st_task_success(job_ids: list[str]) -> list[DaskClientTaskSta mocked_dask_client.get_tasks_status.side_effect = _return_1st_task_success async def _return_random_task_result(job_id) -> TaskOutputData: - return TaskOutputData.parse_obj({"out_1": None, "out_2": 45}) + return TaskOutputData.model_validate({"out_1": None, "out_2": 45}) mocked_dask_client.get_task_result.side_effect = _return_random_task_result await run_comp_scheduler(scheduler) @@ -1178,7 +1178,7 @@ class RebootState: pytest.param( RebootState( dask_task_status=DaskClientTaskState.SUCCESS, - task_result=TaskOutputData.parse_obj({"whatever_output": 123}), + task_result=TaskOutputData.model_validate({"whatever_output": 123}), expected_task_state_group1=RunningState.SUCCESS, expected_task_progress_group1=1, expected_task_state_group2=RunningState.SUCCESS, diff --git a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py index b08c5c0c00c..49a429bd0dd 100644 --- a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py +++ b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py @@ -338,7 +338,7 @@ def service_name() -> str: @pytest.fixture( params=[ - SimcoreServiceLabels.parse_obj(example) + SimcoreServiceLabels.model_validate(example) for example in SimcoreServiceLabels.Config.schema_extra["examples"] ], ) @@ -403,7 +403,7 @@ def test_settings__valid_network_names( items["SIMCORE_SERVICES_NETWORK_NAME"] = 
simcore_services_network_name # validate network names - DynamicServicesSchedulerSettings.parse_obj(items) + DynamicServicesSchedulerSettings.model_validate(items) async def test_failed_docker_client_request(docker_swarm: None): diff --git a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py index 4ebacc3424e..670456e142a 100644 --- a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py +++ b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py @@ -46,7 +46,7 @@ @pytest.fixture def mock_s3_settings() -> S3Settings: - return S3Settings.parse_obj(S3Settings.Config.schema_extra["examples"][0]) + return S3Settings.model_validate(S3Settings.Config.schema_extra["examples"][0]) @pytest.fixture @@ -112,14 +112,14 @@ def swarm_network_id() -> str: @pytest.fixture def simcore_service_labels() -> SimcoreServiceLabels: # overwrites global fixture - return SimcoreServiceLabels.parse_obj( + return SimcoreServiceLabels.model_validate( SimcoreServiceLabels.Config.schema_extra["examples"][2] ) @pytest.fixture def hardware_info() -> HardwareInfo: - return HardwareInfo.parse_obj(HardwareInfo.Config.schema_extra["examples"][0]) + return HardwareInfo.model_validate(HardwareInfo.Config.schema_extra["examples"][0]) @pytest.fixture @@ -134,7 +134,7 @@ def expected_dynamic_sidecar_spec( return { "endpoint_spec": {}, "labels": { - "io.simcore.scheduler-data": SchedulerData.parse_obj( + "io.simcore.scheduler-data": SchedulerData.model_validate( { "compose_spec": '{"version": "2.3", "services": {"rt-web": {"image": ' '"${SIMCORE_REGISTRY}/simcore/services/dynamic/sim4life:${SERVICE_VERSION}", ' @@ -432,7 +432,7 @@ async def test_get_dynamic_proxy_spec( == minimal_app.state.settings.DYNAMIC_SERVICES.DYNAMIC_SIDECAR ) - expected_dynamic_sidecar_spec_model = AioDockerServiceSpec.parse_obj( + expected_dynamic_sidecar_spec_model = AioDockerServiceSpec.model_validate( expected_dynamic_sidecar_spec ) assert expected_dynamic_sidecar_spec_model.TaskTemplate @@ -553,7 +553,7 @@ async def test_merge_dynamic_sidecar_specs_with_user_specific_specs( ) assert dynamic_sidecar_spec dynamic_sidecar_spec_dict = dynamic_sidecar_spec.dict() - expected_dynamic_sidecar_spec_dict = AioDockerServiceSpec.parse_obj( + expected_dynamic_sidecar_spec_dict = AioDockerServiceSpec.model_validate( expected_dynamic_sidecar_spec ).dict() # ensure some entries are sorted the same to prevent flakyness @@ -582,7 +582,7 @@ async def test_merge_dynamic_sidecar_specs_with_user_specific_specs( ) assert user_service_specs assert "sidecar" in user_service_specs - user_aiodocker_service_spec = AioDockerServiceSpec.parse_obj( + user_aiodocker_service_spec = AioDockerServiceSpec.model_validate( user_service_specs["sidecar"] ) assert user_aiodocker_service_spec diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v2/_public_client.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v2/_public_client.py index fd5ce9a2cb2..216d7627800 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v2/_public_client.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v2/_public_client.py @@ -43,9 +43,9 @@ async def get_status( # in case of legacy version # we need to transfer the correct format! 
if "data" in dict_response: - return NodeGet.parse_obj(dict_response["data"]) + return NodeGet.model_validate(dict_response["data"]) - return DynamicServiceGet.parse_obj(dict_response) + return DynamicServiceGet.model_validate(dict_response) except UnexpectedStatusError as e: if ( e.response.status_code # type: ignore[attr-defined] # pylint:disable=no-member @@ -62,9 +62,9 @@ async def run_dynamic_service( # legacy services if "data" in dict_response: - return NodeGet.parse_obj(dict_response["data"]) + return NodeGet.model_validate(dict_response["data"]) - return DynamicServiceGet.parse_obj(dict_response) + return DynamicServiceGet.model_validate(dict_response) async def stop_dynamic_service( self, diff --git a/services/dynamic-scheduler/tests/unit/api_rpc/test_api_rpc__services.py b/services/dynamic-scheduler/tests/unit/api_rpc/test_api_rpc__services.py index 7c8dada1e18..379719e7297 100644 --- a/services/dynamic-scheduler/tests/unit/api_rpc/test_api_rpc__services.py +++ b/services/dynamic-scheduler/tests/unit/api_rpc/test_api_rpc__services.py @@ -52,14 +52,14 @@ def node_not_found(faker: Faker) -> NodeID: @pytest.fixture def service_status_new_style() -> DynamicServiceGet: - return DynamicServiceGet.parse_obj( + return DynamicServiceGet.model_validate( DynamicServiceGet.Config.schema_extra["examples"][1] ) @pytest.fixture def service_status_legacy() -> NodeGet: - return NodeGet.parse_obj(NodeGet.Config.schema_extra["example"]) + return NodeGet.model_validate(NodeGet.Config.schema_extra["example"]) @pytest.fixture @@ -173,7 +173,7 @@ async def test_get_state( @pytest.fixture def dynamic_service_start() -> DynamicServiceStart: # one for legacy and one for new style? - return DynamicServiceStart.parse_obj( + return DynamicServiceStart.model_validate( DynamicServiceStart.Config.schema_extra["example"] ) diff --git a/services/dynamic-scheduler/tests/unit/test__model_examples.py b/services/dynamic-scheduler/tests/unit/test__model_examples.py index 858bcc66a4d..2d89091d1b6 100644 --- a/services/dynamic-scheduler/tests/unit/test__model_examples.py +++ b/services/dynamic-scheduler/tests/unit/test__model_examples.py @@ -15,7 +15,7 @@ def test_api_server_model_examples( model_cls: type[BaseModel], example_name: int, example_data: Any ): try: - assert model_cls.parse_obj(example_data) is not None + assert model_cls.model_validate(example_data) is not None except ValidationError as err: pytest.fail( f"\n{example_name}: {json.dumps(example_data, indent=1)}\nError: {err}" diff --git a/services/dynamic-sidecar/tests/unit/test_api_containers.py b/services/dynamic-sidecar/tests/unit/test_api_containers.py index 3a053d8e2fe..219d1336476 100644 --- a/services/dynamic-sidecar/tests/unit/test_api_containers.py +++ b/services/dynamic-sidecar/tests/unit/test_api_containers.py @@ -266,10 +266,10 @@ def not_started_containers() -> list[str]: @pytest.fixture def mock_outputs_labels() -> dict[str, ServiceOutput]: return { - "output_port_1": ServiceOutput.parse_obj( + "output_port_1": ServiceOutput.model_validate( ServiceOutput.Config.schema_extra["examples"][3] ), - "output_port_2": ServiceOutput.parse_obj( + "output_port_2": ServiceOutput.model_validate( ServiceOutput.Config.schema_extra["examples"][3] ), } diff --git a/services/dynamic-sidecar/tests/unit/test_core_stroage.py b/services/dynamic-sidecar/tests/unit/test_core_stroage.py index c8f1d19405d..cc45300dbe2 100644 --- a/services/dynamic-sidecar/tests/unit/test_core_stroage.py +++ b/services/dynamic-sidecar/tests/unit/test_core_stroage.py @@ -60,7 +60,7 @@ 
async def unprotected_route(): def storage_auth_settings( username: str | None, password: str | None ) -> StorageAuthSettings: - return StorageAuthSettings.parse_obj( + return StorageAuthSettings.model_validate( { "STORAGE_HOST": "localhost", "STORAGE_PORT": 44332, diff --git a/services/invitations/tests/unit/api/test_api_invitations.py b/services/invitations/tests/unit/api/test_api_invitations.py index 572f30f8173..87ee9c28800 100644 --- a/services/invitations/tests/unit/api/test_api_invitations.py +++ b/services/invitations/tests/unit/api/test_api_invitations.py @@ -64,7 +64,7 @@ def test_check_invitation( # up ot here, identifcal to above. # Let's use invitation link - invitation_url = ApiInvitationContentAndLink.parse_obj( + invitation_url = ApiInvitationContentAndLink.model_validate( response.json() ).invitation_url @@ -77,7 +77,7 @@ def test_check_invitation( assert response.status_code == 200, f"{response.json()=}" # decrypted invitation should be identical to request above - invitation = InvitationContent.parse_obj(response.json()) + invitation = InvitationContent.model_validate(response.json()) assert invitation.issuer == invitation_data.issuer assert invitation.guest == invitation_data.guest assert invitation.trial_account_days == invitation_data.trial_account_days @@ -106,7 +106,7 @@ def test_check_valid_invitation( assert response.status_code == 200, f"{response.json()=}" # decrypted invitation should be identical to request above - invitation = InvitationContent.parse_obj(response.json()) + invitation = InvitationContent.model_validate(response.json()) assert invitation.issuer == invitation_data.issuer assert invitation.guest == invitation_data.guest diff --git a/services/invitations/tests/unit/api/test_api_meta.py b/services/invitations/tests/unit/api/test_api_meta.py index cee4afd13c9..ff9c3768e5e 100644 --- a/services/invitations/tests/unit/api/test_api_meta.py +++ b/services/invitations/tests/unit/api/test_api_meta.py @@ -19,7 +19,7 @@ def test_healthcheck(client: TestClient): def test_meta(client: TestClient): response = client.get(f"/{API_VTAG}/meta") assert response.status_code == status.HTTP_200_OK - meta = _Meta.parse_obj(response.json()) + meta = _Meta.model_validate(response.json()) response = client.get(meta.docs_url) assert response.status_code == status.HTTP_200_OK diff --git a/services/invitations/tests/unit/conftest.py b/services/invitations/tests/unit/conftest.py index 1b6ea4ee6e9..fde239d5332 100644 --- a/services/invitations/tests/unit/conftest.py +++ b/services/invitations/tests/unit/conftest.py @@ -109,4 +109,4 @@ def invitation_data( if product: kwargs["product"] = product - return InvitationInputs.parse_obj(kwargs) + return InvitationInputs.model_validate(kwargs) diff --git a/services/invitations/tests/unit/test__model_examples.py b/services/invitations/tests/unit/test__model_examples.py index 31ed0dfc603..78dfdd96669 100644 --- a/services/invitations/tests/unit/test__model_examples.py +++ b/services/invitations/tests/unit/test__model_examples.py @@ -26,4 +26,4 @@ def test_model_examples( model_cls: type[BaseModel], example_name: int, example_data: Any ): print(example_name, ":", json.dumps(example_data)) - assert model_cls.parse_obj(example_data) + assert model_cls.model_validate(example_data) diff --git a/services/invitations/tests/unit/test_cli.py b/services/invitations/tests/unit/test_cli.py index e77247115cf..6a631e660e4 100644 --- a/services/invitations/tests/unit/test_cli.py +++ b/services/invitations/tests/unit/test_cli.py @@ -82,7 +82,7 @@ def 
test_echo_dotenv(cli_runner: CliRunner, monkeypatch: pytest.MonkeyPatch): environs = load_dotenv(result.stdout) envs = setenvs_from_dict(monkeypatch, environs) - settings_from_obj = ApplicationSettings.parse_obj(envs) + settings_from_obj = ApplicationSettings.model_validate(envs) settings_from_envs = ApplicationSettings.create_from_envs() assert settings_from_envs == settings_from_obj diff --git a/services/payments/src/simcore_service_payments/services/payments_gateway.py b/services/payments/src/simcore_service_payments/services/payments_gateway.py index 1e5748103ee..d2789c67221 100644 --- a/services/payments/src/simcore_service_payments/services/payments_gateway.py +++ b/services/payments/src/simcore_service_payments/services/payments_gateway.py @@ -127,7 +127,7 @@ async def init_payment(self, payment: InitPayment) -> PaymentInitiated: json=jsonable_encoder(payment.dict(exclude_none=True, by_alias=True)), ) response.raise_for_status() - return PaymentInitiated.parse_obj(response.json()) + return PaymentInitiated.model_validate(response.json()) def get_form_payment_url(self, id_: PaymentID) -> URL: return self.client.base_url.copy_with(path="/pay", params={"id": f"{id_}"}) @@ -141,7 +141,7 @@ async def cancel_payment( json=jsonable_encoder(payment_initiated), ) response.raise_for_status() - return PaymentCancelled.parse_obj(response.json()) + return PaymentCancelled.model_validate(response.json()) # # api: payment method workflows @@ -157,7 +157,7 @@ async def init_payment_method( json=jsonable_encoder(payment_method), ) response.raise_for_status() - return PaymentMethodInitiated.parse_obj(response.json()) + return PaymentMethodInitiated.model_validate(response.json()) def get_form_payment_method_url(self, id_: PaymentMethodID) -> URL: return self.client.base_url.copy_with( @@ -177,13 +177,13 @@ async def get_many_payment_methods( json=jsonable_encoder(BatchGetPaymentMethods(payment_methods_ids=ids_)), ) response.raise_for_status() - return PaymentMethodsBatch.parse_obj(response.json()).items + return PaymentMethodsBatch.model_validate(response.json()).items @_handle_status_errors async def get_payment_method(self, id_: PaymentMethodID) -> GetPaymentMethod: response = await self.client.get(f"/payment-methods/{id_}") response.raise_for_status() - return GetPaymentMethod.parse_obj(response.json()) + return GetPaymentMethod.model_validate(response.json()) @_handle_status_errors async def delete_payment_method(self, id_: PaymentMethodID) -> None: @@ -201,7 +201,7 @@ async def pay_with_payment_method( json=jsonable_encoder(payment.dict(exclude_none=True, by_alias=True)), ) response.raise_for_status() - return AckPaymentWithPaymentMethod.parse_obj(response.json()) + return AckPaymentWithPaymentMethod.model_validate(response.json()) def setup_payments_gateway(app: FastAPI): diff --git a/services/payments/tests/unit/api/test_rest_acknowledgements.py b/services/payments/tests/unit/api/test_rest_acknowledgements.py index 4df30829f93..f8544e5f370 100644 --- a/services/payments/tests/unit/api/test_rest_acknowledgements.py +++ b/services/payments/tests/unit/api/test_rest_acknowledgements.py @@ -108,7 +108,7 @@ async def test_payments_api_authentication( ) assert response.status_code == status.HTTP_404_NOT_FOUND, response.json() - error = DefaultApiError.parse_obj(response.json()) + error = DefaultApiError.model_validate(response.json()) assert PaymentNotFoundError.msg_template.format(payment_id=payments_id) == str( error.detail ) @@ -138,7 +138,7 @@ async def test_payments_methods_api_authentication( ) 
assert response.status_code == status.HTTP_404_NOT_FOUND, response.json() - error = DefaultApiError.parse_obj(response.json()) + error = DefaultApiError.model_validate(response.json()) assert PaymentMethodNotFoundError.msg_template.format( payment_method_id=payment_method_id ) == str(error.detail) diff --git a/services/payments/tests/unit/api/test_rest_meta.py b/services/payments/tests/unit/api/test_rest_meta.py index 1ca81c9b0a2..993a98fd45a 100644 --- a/services/payments/tests/unit/api/test_rest_meta.py +++ b/services/payments/tests/unit/api/test_rest_meta.py @@ -60,7 +60,7 @@ async def test_meta( ): response = await client.get(f"/{API_VTAG}/meta", headers=auth_headers) assert response.status_code == status.HTTP_200_OK - meta = Meta.parse_obj(response.json()) + meta = Meta.model_validate(response.json()) response = await client.get(meta.docs_url) assert response.status_code == status.HTTP_200_OK diff --git a/services/payments/tests/unit/test__model_examples.py b/services/payments/tests/unit/test__model_examples.py index beab80e794d..6e072aa658a 100644 --- a/services/payments/tests/unit/test__model_examples.py +++ b/services/payments/tests/unit/test__model_examples.py @@ -21,7 +21,7 @@ def test_api_server_model_examples( model_cls: type[BaseModel], example_name: int, example_data: Any ): try: - assert model_cls.parse_obj(example_data) is not None + assert model_cls.model_validate(example_data) is not None except ValidationError as err: pytest.fail( f"\n{example_name}: {json.dumps(example_data, indent=1)}\nError: {err}" diff --git a/services/payments/tests/unit/test_services_auto_recharge_listener.py b/services/payments/tests/unit/test_services_auto_recharge_listener.py index 196c1cdcd98..27b7883b6af 100644 --- a/services/payments/tests/unit/test_services_auto_recharge_listener.py +++ b/services/payments/tests/unit/test_services_auto_recharge_listener.py @@ -200,7 +200,7 @@ async def get_invoice_data( dollar_amount: Decimal, product_name: ProductName, ) -> InvoiceDataGet: - return InvoiceDataGet.parse_obj( + return InvoiceDataGet.model_validate( InvoiceDataGet.Config.schema_extra["examples"][0] ) diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/resource_tracker_pricing_plans.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/resource_tracker_pricing_plans.py index d37f244dbc9..faf7335e1ac 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/resource_tracker_pricing_plans.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/resource_tracker_pricing_plans.py @@ -96,7 +96,7 @@ async def list_connected_services_to_pricing_plan_by_pricing_plan( ] = await resource_tracker_repo.list_connected_services_to_pricing_plan_by_pricing_plan( product_name=product_name, pricing_plan_id=pricing_plan_id ) - return [PricingPlanToServiceGet.parse_obj(item) for item in output_list] + return [PricingPlanToServiceGet.model_validate(item) for item in output_list] async def connect_service_to_pricing_plan( @@ -116,7 +116,7 @@ async def connect_service_to_pricing_plan( service_version=service_version, ) ) - return PricingPlanToServiceGet.parse_obj(output) + return PricingPlanToServiceGet.model_validate(output) async def list_pricing_plans_by_product( diff --git a/services/resource-usage-tracker/tests/unit/api_rest/test_api_meta.py b/services/resource-usage-tracker/tests/unit/api_rest/test_api_meta.py index cffb606fae5..568abc8eca0 100644 --- 
a/services/resource-usage-tracker/tests/unit/api_rest/test_api_meta.py +++ b/services/resource-usage-tracker/tests/unit/api_rest/test_api_meta.py @@ -68,7 +68,7 @@ def test_meta( ): response = client.get(f"/{API_VTAG}/meta") assert response.status_code == status.HTTP_200_OK - meta = _Meta.parse_obj(response.json()) + meta = _Meta.model_validate(response.json()) response = client.get(meta.docs_url) assert response.status_code == status.HTTP_200_OK diff --git a/services/storage/src/simcore_service_storage/handlers_health.py b/services/storage/src/simcore_service_storage/handlers_health.py index eb94feb9bb4..cfe0f45272b 100644 --- a/services/storage/src/simcore_service_storage/handlers_health.py +++ b/services/storage/src/simcore_service_storage/handlers_health.py @@ -31,7 +31,7 @@ async def get_health(request: web.Request) -> web.Response: assert request # nosec return web.json_response( { - "data": HealthCheck.parse_obj( + "data": HealthCheck.model_validate( { "name": PROJECT_NAME, "version": f"{VERSION}", @@ -69,7 +69,7 @@ async def get_status(request: web.Request) -> web.Response: "connected" if await is_pg_responsive(request.app) else "failed" ) - status = AppStatusCheck.parse_obj( + status = AppStatusCheck.model_validate( { "app_name": PROJECT_NAME, "version": f"{VERSION}", diff --git a/services/storage/src/simcore_service_storage/models.py b/services/storage/src/simcore_service_storage/models.py index d05099edd06..c72bf436f3a 100644 --- a/services/storage/src/simcore_service_storage/models.py +++ b/services/storage/src/simcore_service_storage/models.py @@ -128,7 +128,7 @@ def from_simcore_node( "is_directory": False, } fmd_kwargs.update(**file_meta_data_kwargs) - return cls.parse_obj(fmd_kwargs) + return cls.model_validate(fmd_kwargs) @dataclass diff --git a/services/storage/src/simcore_service_storage/utils.py b/services/storage/src/simcore_service_storage/utils.py index 0baddfcfc9a..e2755f6de45 100644 --- a/services/storage/src/simcore_service_storage/utils.py +++ b/services/storage/src/simcore_service_storage/utils.py @@ -15,7 +15,7 @@ def convert_db_to_model(x: FileMetaDataAtDB) -> FileMetaData: - model: FileMetaData = FileMetaData.parse_obj( + model: FileMetaData = FileMetaData.model_validate( x.dict() | { "file_uuid": x.file_id, diff --git a/services/storage/tests/conftest.py b/services/storage/tests/conftest.py index 1b3f634446c..703a34dada9 100644 --- a/services/storage/tests/conftest.py +++ b/services/storage/tests/conftest.py @@ -363,7 +363,9 @@ async def _uploader( data, error = await assert_status(response, status.HTTP_202_ACCEPTED) assert not error assert data - file_upload_complete_response = FileUploadCompleteResponse.parse_obj(data) + file_upload_complete_response = FileUploadCompleteResponse.model_validate( + data + ) state_url = URL(file_upload_complete_response.links.state).relative() completion_etag = None @@ -382,7 +384,7 @@ async def _uploader( data, error = await assert_status(response, status.HTTP_200_OK) assert not error assert data - future = FileUploadCompleteFutureResponse.parse_obj(data) + future = FileUploadCompleteFutureResponse.model_validate(data) if future.state == FileUploadCompleteState.NOK: msg = f"{data=}" raise ValueError(msg) @@ -479,7 +481,7 @@ async def _directory_creator(dir_name: str): data, error = await assert_status(response, status.HTTP_202_ACCEPTED) assert not error assert data - file_upload_complete_response = FileUploadCompleteResponse.parse_obj(data) + file_upload_complete_response = FileUploadCompleteResponse.model_validate(data) 
state_url = URL(file_upload_complete_response.links.state).relative() # check that it finished updating @@ -500,7 +502,7 @@ async def _directory_creator(dir_name: str): data, error = await assert_status(response, status.HTTP_200_OK) assert not error assert data - future = FileUploadCompleteFutureResponse.parse_obj(data) + future = FileUploadCompleteFutureResponse.model_validate(data) assert future.state == FileUploadCompleteState.OK assert future.e_tag is None ctx.logger.info( diff --git a/services/storage/tests/unit/test_cli.py b/services/storage/tests/unit/test_cli.py index cab69609fdd..cff3633bc4f 100644 --- a/services/storage/tests/unit/test_cli.py +++ b/services/storage/tests/unit/test_cli.py @@ -29,7 +29,7 @@ def test_cli_settings_as_json( assert result.exit_code == os.EX_OK, result # reuse resulting json to build settings settings: dict = json.loads(result.stdout) - assert Settings.parse_obj(settings) + assert Settings.model_validate(settings) def test_cli_settings_env_file( @@ -46,4 +46,4 @@ def test_cli_settings_env_file( with contextlib.suppress(json.decoder.JSONDecodeError): settings[key] = json.loads(str(value)) - assert Settings.parse_obj(settings) + assert Settings.model_validate(settings) diff --git a/services/storage/tests/unit/test_handlers_files.py b/services/storage/tests/unit/test_handlers_files.py index 21e3bac8c3d..318650a1076 100644 --- a/services/storage/tests/unit/test_handlers_files.py +++ b/services/storage/tests/unit/test_handlers_files.py @@ -622,7 +622,7 @@ async def test_upload_real_file_with_emulated_storage_restart_after_completion_w data, error = await assert_status(response, status.HTTP_202_ACCEPTED) assert not error assert data - file_upload_complete_response = FileUploadCompleteResponse.parse_obj(data) + file_upload_complete_response = FileUploadCompleteResponse.model_validate(data) state_url = URL(file_upload_complete_response.links.state).relative() # here we do not check now for the state completion. 
instead we simulate a restart where the tasks disappear @@ -643,7 +643,7 @@ async def test_upload_real_file_with_emulated_storage_restart_after_completion_w data, error = await assert_status(response, status.HTTP_200_OK) assert not error assert data - future = FileUploadCompleteFutureResponse.parse_obj(data) + future = FileUploadCompleteFutureResponse.model_validate(data) assert future.state == FileUploadCompleteState.OK assert future.e_tag is not None completion_etag = future.e_tag @@ -760,7 +760,7 @@ async def test_upload_real_file_with_s3_client( data, error = await assert_status(response, status.HTTP_202_ACCEPTED) assert not error assert data - file_upload_complete_response = FileUploadCompleteResponse.parse_obj(data) + file_upload_complete_response = FileUploadCompleteResponse.model_validate(data) state_url = URL(file_upload_complete_response.links.state).relative() completion_etag = None async for attempt in AsyncRetrying( @@ -778,7 +778,7 @@ async def test_upload_real_file_with_s3_client( data, error = await assert_status(response, status.HTTP_200_OK) assert not error assert data - future = FileUploadCompleteFutureResponse.parse_obj(data) + future = FileUploadCompleteFutureResponse.model_validate(data) if future.state != FileUploadCompleteState.OK: msg = f"{data=}" raise ValueError(msg) diff --git a/services/storage/tests/unit/test_handlers_health.py b/services/storage/tests/unit/test_handlers_health.py index d10b882b611..8705c4c8e36 100644 --- a/services/storage/tests/unit/test_handlers_health.py +++ b/services/storage/tests/unit/test_handlers_health.py @@ -26,7 +26,7 @@ async def test_health_check(client: TestClient): assert data assert not error - app_health = HealthCheck.parse_obj(data) + app_health = HealthCheck.model_validate(data) assert app_health.name == simcore_service_storage._meta.PROJECT_NAME # noqa: SLF001 assert app_health.version == str( simcore_service_storage._meta.VERSION @@ -41,7 +41,7 @@ async def test_health_status(client: TestClient): assert data assert not error - app_status_check = AppStatusCheck.parse_obj(data) + app_status_check = AppStatusCheck.model_validate(data) assert ( app_status_check.app_name == simcore_service_storage._meta.PROJECT_NAME ) # noqa: SLF001 @@ -68,7 +68,7 @@ async def test_bad_health_status_if_bucket_missing( data, error = await assert_status(response, status.HTTP_200_OK) assert data assert not error - app_status_check = AppStatusCheck.parse_obj(data) + app_status_check = AppStatusCheck.model_validate(data) assert app_status_check.services["s3"]["healthy"] == "connected" # now delete the bucket await s3_client.delete_bucket(Bucket=storage_s3_bucket) @@ -77,7 +77,7 @@ async def test_bad_health_status_if_bucket_missing( data, error = await assert_status(response, status.HTTP_200_OK) assert data assert not error - app_status_check = AppStatusCheck.parse_obj(data) + app_status_check = AppStatusCheck.model_validate(data) assert app_status_check.services["s3"]["healthy"] == "no access to S3 bucket" @@ -90,7 +90,7 @@ async def test_bad_health_status_if_s3_server_missing( data, error = await assert_status(response, status.HTTP_200_OK) assert data assert not error - app_status_check = AppStatusCheck.parse_obj(data) + app_status_check = AppStatusCheck.model_validate(data) assert app_status_check.services["s3"]["healthy"] == "connected" # now disable the s3 server mocked_aws_server.stop() @@ -99,7 +99,7 @@ async def test_bad_health_status_if_s3_server_missing( data, error = await assert_status(response, status.HTTP_200_OK) assert data assert not 
error - app_status_check = AppStatusCheck.parse_obj(data) + app_status_check = AppStatusCheck.model_validate(data) assert app_status_check.services["s3"]["healthy"] == "failed" # start the server again mocked_aws_server.start() @@ -108,5 +108,5 @@ async def test_bad_health_status_if_s3_server_missing( data, error = await assert_status(response, status.HTTP_200_OK) assert data assert not error - app_status_check = AppStatusCheck.parse_obj(data) + app_status_check = AppStatusCheck.model_validate(data) assert app_status_check.services["s3"]["healthy"] == "connected" diff --git a/services/storage/tests/unit/test_handlers_simcore_s3.py b/services/storage/tests/unit/test_handlers_simcore_s3.py index e922f1f60f0..d3cde0e1ef9 100644 --- a/services/storage/tests/unit/test_handlers_simcore_s3.py +++ b/services/storage/tests/unit/test_handlers_simcore_s3.py @@ -82,7 +82,7 @@ async def test_simcore_s3_access_returns_default(client: TestClient): data, error = await assert_status(response, status.HTTP_200_OK) assert not error assert data - received_settings = S3Settings.parse_obj(data) + received_settings = S3Settings.model_validate(data) assert received_settings diff --git a/services/web/server/src/simcore_service_webserver/api_keys/_api.py b/services/web/server/src/simcore_service_webserver/api_keys/_api.py index 9a46ad9f512..9bbe56f7c6f 100644 --- a/services/web/server/src/simcore_service_webserver/api_keys/_api.py +++ b/services/web/server/src/simcore_service_webserver/api_keys/_api.py @@ -70,7 +70,7 @@ async def get_api_key( ) -> ApiKeyGet | None: repo = ApiKeyRepo.create_from_app(app) row = await repo.get(display_name=name, user_id=user_id, product_name=product_name) - return ApiKeyGet.parse_obj(row) if row else None + return ApiKeyGet.model_validate(row) if row else None async def get_or_create_api_key( diff --git a/services/web/server/src/simcore_service_webserver/api_keys/_handlers.py b/services/web/server/src/simcore_service_webserver/api_keys/_handlers.py index ce7a7be0943..aac66007940 100644 --- a/services/web/server/src/simcore_service_webserver/api_keys/_handlers.py +++ b/services/web/server/src/simcore_service_webserver/api_keys/_handlers.py @@ -31,7 +31,7 @@ class _RequestContext(RequestParams): @login_required @permission_required("user.apikey.*") async def list_api_keys(request: web.Request): - req_ctx = _RequestContext.parse_obj(request) + req_ctx = _RequestContext.model_validate(request) api_keys_names = await _api.list_api_keys( request.app, user_id=req_ctx.user_id, @@ -44,7 +44,7 @@ async def list_api_keys(request: web.Request): @login_required @permission_required("user.apikey.*") async def create_api_key(request: web.Request): - req_ctx = _RequestContext.parse_obj(request) + req_ctx = _RequestContext.model_validate(request) new = await parse_request_body_as(ApiKeyCreate, request) try: data = await _api.create_api_key( @@ -66,7 +66,7 @@ async def create_api_key(request: web.Request): @login_required @permission_required("user.apikey.*") async def delete_api_key(request: web.Request): - req_ctx = _RequestContext.parse_obj(request) + req_ctx = _RequestContext.model_validate(request) # NOTE: SEE https://github.com/ITISFoundation/osparc-simcore/issues/4920 body = await request.json() diff --git a/services/web/server/src/simcore_service_webserver/catalog/_api.py b/services/web/server/src/simcore_service_webserver/catalog/_api.py index 9bbbae4e43c..391652ba4ef 100644 --- a/services/web/server/src/simcore_service_webserver/catalog/_api.py +++ 
b/services/web/server/src/simcore_service_webserver/catalog/_api.py @@ -157,7 +157,7 @@ async def update_service_v2( user_id=user_id, service_key=service_key, service_version=service_version, - update=ServiceUpdateV2.parse_obj(update_data), + update=ServiceUpdateV2.model_validate(update_data), ) data = jsonable_encoder(service, exclude_unset=True) diff --git a/services/web/server/src/simcore_service_webserver/catalog/_handlers.py b/services/web/server/src/simcore_service_webserver/catalog/_handlers.py index cfaafe30ab6..9dbc05d20fb 100644 --- a/services/web/server/src/simcore_service_webserver/catalog/_handlers.py +++ b/services/web/server/src/simcore_service_webserver/catalog/_handlers.py @@ -98,7 +98,7 @@ async def list_services_latest(request: Request): assert page_meta.limit == query_params.limit # nosec assert page_meta.offset == query_params.offset # nosec - page = Page[CatalogServiceGet].parse_obj( + page = Page[CatalogServiceGet].model_validate( paginate_data( chunk=page_items, request_url=request.url, @@ -133,7 +133,7 @@ async def get_service(request: Request): service_version=path_params.service_version, ) - return envelope_json_response(CatalogServiceGet.parse_obj(service)) + return envelope_json_response(CatalogServiceGet.model_validate(service)) @routes.patch( @@ -164,7 +164,7 @@ async def update_service(request: Request): unit_registry=request_ctx.unit_registry, ) - return envelope_json_response(CatalogServiceGet.parse_obj(updated)) + return envelope_json_response(CatalogServiceGet.model_validate(updated)) @routes.get( diff --git a/services/web/server/src/simcore_service_webserver/catalog/client.py b/services/web/server/src/simcore_service_webserver/catalog/client.py index 8a8f6083252..b8de2956019 100644 --- a/services/web/server/src/simcore_service_webserver/catalog/client.py +++ b/services/web/server/src/simcore_service_webserver/catalog/client.py @@ -168,7 +168,7 @@ async def get_service_access_rights( ) as resp: resp.raise_for_status() body = await resp.json() - return ServiceAccessRightsGet.parse_obj(body) + return ServiceAccessRightsGet.model_validate(body) async def update_service( diff --git a/services/web/server/src/simcore_service_webserver/clusters/_handlers.py b/services/web/server/src/simcore_service_webserver/clusters/_handlers.py index 70752da883b..9c4bb81cf95 100644 --- a/services/web/server/src/simcore_service_webserver/clusters/_handlers.py +++ b/services/web/server/src/simcore_service_webserver/clusters/_handlers.py @@ -78,7 +78,7 @@ class _RequestContext(BaseModel): @permission_required("clusters.create") @_handle_cluster_exceptions async def create_cluster(request: web.Request) -> web.Response: - req_ctx = _RequestContext.parse_obj(request) + req_ctx = _RequestContext.model_validate(request) new_cluster = await parse_request_body_as(ClusterCreate, request) created_cluster = await director_v2_api.create_cluster( @@ -94,7 +94,7 @@ async def create_cluster(request: web.Request) -> web.Response: @permission_required("clusters.read") @_handle_cluster_exceptions async def list_clusters(request: web.Request) -> web.Response: - req_ctx = _RequestContext.parse_obj(request) + req_ctx = _RequestContext.model_validate(request) clusters = await director_v2_api.list_clusters( app=request.app, @@ -109,7 +109,7 @@ async def list_clusters(request: web.Request) -> web.Response: @permission_required("clusters.read") @_handle_cluster_exceptions async def get_cluster(request: web.Request) -> web.Response: - req_ctx = _RequestContext.parse_obj(request) + req_ctx = 
_RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ClusterPathParams, request) cluster = await director_v2_api.get_cluster( @@ -126,7 +126,7 @@ async def get_cluster(request: web.Request) -> web.Response: @permission_required("clusters.write") @_handle_cluster_exceptions async def update_cluster(request: web.Request) -> web.Response: - req_ctx = _RequestContext.parse_obj(request) + req_ctx = _RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ClusterPathParams, request) cluster_patch = await parse_request_body_as(ClusterPatch, request) @@ -146,7 +146,7 @@ async def update_cluster(request: web.Request) -> web.Response: @permission_required("clusters.delete") @_handle_cluster_exceptions async def delete_cluster(request: web.Request) -> web.Response: - req_ctx = _RequestContext.parse_obj(request) + req_ctx = _RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ClusterPathParams, request) await director_v2_api.delete_cluster( @@ -165,7 +165,7 @@ async def delete_cluster(request: web.Request) -> web.Response: @permission_required("clusters.read") @_handle_cluster_exceptions async def get_cluster_details(request: web.Request) -> web.Response: - req_ctx = _RequestContext.parse_obj(request) + req_ctx = _RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ClusterPathParams, request) cluster_details = await director_v2_api.get_cluster_details( @@ -199,7 +199,7 @@ async def ping_cluster(request: web.Request) -> web.Response: @permission_required("clusters.read") @_handle_cluster_exceptions async def ping_cluster_cluster_id(request: web.Request) -> web.Response: - req_ctx = _RequestContext.parse_obj(request) + req_ctx = _RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ClusterPathParams, request) await director_v2_api.ping_specific_cluster( diff --git a/services/web/server/src/simcore_service_webserver/diagnostics/_handlers.py b/services/web/server/src/simcore_service_webserver/diagnostics/_handlers.py index 13154bf5723..b57ff10f734 100644 --- a/services/web/server/src/simcore_service_webserver/diagnostics/_handlers.py +++ b/services/web/server/src/simcore_service_webserver/diagnostics/_handlers.py @@ -99,7 +99,7 @@ def _get_client_session_info(): return info - check = AppStatusCheck.parse_obj( + check = AppStatusCheck.model_validate( { "app_name": APP_NAME, "version": API_VERSION, diff --git a/services/web/server/src/simcore_service_webserver/director_v2/_core_computations.py b/services/web/server/src/simcore_service_webserver/director_v2/_core_computations.py index 950d92fa2eb..58f9ed4e87f 100644 --- a/services/web/server/src/simcore_service_webserver/director_v2/_core_computations.py +++ b/services/web/server/src/simcore_service_webserver/director_v2/_core_computations.py @@ -182,7 +182,7 @@ async def get_computation_task( computation_task_out_dict = await request_director_v2( app, "GET", backend_url, expected_status=web.HTTPOk ) - task_out = ComputationTask.parse_obj(computation_task_out_dict) + task_out = ComputationTask.model_validate(computation_task_out_dict) _logger.debug("found computation task: %s", f"{task_out=}") return task_out except DirectorServiceError as exc: diff --git a/services/web/server/src/simcore_service_webserver/director_v2/_handlers.py b/services/web/server/src/simcore_service_webserver/director_v2/_handlers.py index fb80f3aa3fa..12a47a8e718 100644 --- 
a/services/web/server/src/simcore_service_webserver/director_v2/_handlers.py +++ b/services/web/server/src/simcore_service_webserver/director_v2/_handlers.py @@ -67,7 +67,7 @@ class _ComputationStarted(BaseModel): async def start_computation(request: web.Request) -> web.Response: # pylint: disable=too-many-statements try: - req_ctx = RequestContext.parse_obj(request) + req_ctx = RequestContext.model_validate(request) computations = ComputationsApi(request.app) run_policy = get_project_run_policy(request.app) @@ -188,7 +188,7 @@ async def start_computation(request: web.Request) -> web.Response: @permission_required("services.pipeline.*") @permission_required("project.read") async def stop_computation(request: web.Request) -> web.Response: - req_ctx = RequestContext.parse_obj(request) + req_ctx = RequestContext.model_validate(request) computations = ComputationsApi(request.app) run_policy = get_project_run_policy(request.app) assert run_policy # nosec diff --git a/services/web/server/src/simcore_service_webserver/folders/_folders_handlers.py b/services/web/server/src/simcore_service_webserver/folders/_folders_handlers.py index 1879ba714ad..68f2788c850 100644 --- a/services/web/server/src/simcore_service_webserver/folders/_folders_handlers.py +++ b/services/web/server/src/simcore_service_webserver/folders/_folders_handlers.py @@ -133,7 +133,7 @@ class Config: @permission_required("folder.create") @handle_folders_exceptions async def create_folder(request: web.Request): - req_ctx = FoldersRequestContext.parse_obj(request) + req_ctx = FoldersRequestContext.model_validate(request) body_params = await parse_request_body_as(CreateFolderBodyParams, request) folder = await _folders_api.create_folder( @@ -153,7 +153,7 @@ async def create_folder(request: web.Request): @permission_required("folder.read") @handle_folders_exceptions async def list_folders(request: web.Request): - req_ctx = FoldersRequestContext.parse_obj(request) + req_ctx = FoldersRequestContext.model_validate(request) query_params: FolderListWithJsonStrQueryParams = parse_request_query_parameters_as( FolderListWithJsonStrQueryParams, request ) @@ -169,7 +169,7 @@ async def list_folders(request: web.Request): order_by=parse_obj_as(OrderBy, query_params.order_by), ) - page = Page[FolderGet].parse_obj( + page = Page[FolderGet].model_validate( paginate_data( chunk=folders.items, request_url=request.url, @@ -189,7 +189,7 @@ async def list_folders(request: web.Request): @permission_required("folder.read") @handle_folders_exceptions async def get_folder(request: web.Request): - req_ctx = FoldersRequestContext.parse_obj(request) + req_ctx = FoldersRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(FoldersPathParams, request) folder: FolderGet = await _folders_api.get_folder( @@ -210,7 +210,7 @@ async def get_folder(request: web.Request): @permission_required("folder.update") @handle_folders_exceptions async def replace_folder(request: web.Request): - req_ctx = FoldersRequestContext.parse_obj(request) + req_ctx = FoldersRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(FoldersPathParams, request) body_params = await parse_request_body_as(PutFolderBodyParams, request) @@ -233,7 +233,7 @@ async def replace_folder(request: web.Request): @permission_required("folder.delete") @handle_folders_exceptions async def delete_folder_group(request: web.Request): - req_ctx = FoldersRequestContext.parse_obj(request) + req_ctx = FoldersRequestContext.model_validate(request) path_params = 
parse_request_path_parameters_as(FoldersPathParams, request) await _folders_api.delete_folder( diff --git a/services/web/server/src/simcore_service_webserver/groups/_handlers.py b/services/web/server/src/simcore_service_webserver/groups/_handlers.py index 2f0b0411601..47d7ab89b53 100644 --- a/services/web/server/src/simcore_service_webserver/groups/_handlers.py +++ b/services/web/server/src/simcore_service_webserver/groups/_handlers.py @@ -82,7 +82,7 @@ async def list_groups(request: web.Request): """ product: Product = get_current_product(request) - req_ctx = _GroupsRequestContext.parse_obj(request) + req_ctx = _GroupsRequestContext.model_validate(request) primary_group, user_groups, all_group = await api.list_user_groups_with_read_access( request.app, req_ctx.user_id @@ -120,7 +120,7 @@ class Config: @_handle_groups_exceptions async def get_group(request: web.Request): """Get one group details""" - req_ctx = _GroupsRequestContext.parse_obj(request) + req_ctx = _GroupsRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(_GroupPathParams, request) group = await api.get_user_group(request.app, req_ctx.user_id, path_params.gid) @@ -134,7 +134,7 @@ async def get_group(request: web.Request): @_handle_groups_exceptions async def create_group(request: web.Request): """Creates organization groups""" - req_ctx = _GroupsRequestContext.parse_obj(request) + req_ctx = _GroupsRequestContext.model_validate(request) new_group = await request.json() created_group = await api.create_user_group(request.app, req_ctx.user_id, new_group) @@ -149,7 +149,7 @@ async def create_group(request: web.Request): @permission_required("groups.*") @_handle_groups_exceptions async def update_group(request: web.Request): - req_ctx = _GroupsRequestContext.parse_obj(request) + req_ctx = _GroupsRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(_GroupPathParams, request) new_group_values = await request.json() @@ -165,7 +165,7 @@ async def update_group(request: web.Request): @permission_required("groups.*") @_handle_groups_exceptions async def delete_group(request: web.Request): - req_ctx = _GroupsRequestContext.parse_obj(request) + req_ctx = _GroupsRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(_GroupPathParams, request) await api.delete_user_group(request.app, req_ctx.user_id, path_params.gid) @@ -177,7 +177,7 @@ async def delete_group(request: web.Request): @permission_required("groups.*") @_handle_groups_exceptions async def get_group_users(request: web.Request): - req_ctx = _GroupsRequestContext.parse_obj(request) + req_ctx = _GroupsRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(_GroupPathParams, request) group_user = await api.list_users_in_group( @@ -195,7 +195,7 @@ async def add_group_user(request: web.Request): """ Adds a user in an organization group """ - req_ctx = _GroupsRequestContext.parse_obj(request) + req_ctx = _GroupsRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(_GroupPathParams, request) new_user_in_group = await request.json() @@ -234,7 +234,7 @@ async def get_group_user(request: web.Request): """ Gets specific user in group """ - req_ctx = _GroupsRequestContext.parse_obj(request) + req_ctx = _GroupsRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(_GroupUserPathParams, request) user = await api.get_user_in_group( request.app, req_ctx.user_id, path_params.gid, path_params.uid @@ -251,7 
+251,7 @@ async def update_group_user(request: web.Request): """ Modify specific user in group """ - req_ctx = _GroupsRequestContext.parse_obj(request) + req_ctx = _GroupsRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(_GroupUserPathParams, request) new_values_for_user_in_group = await request.json() user = await api.update_user_in_group( @@ -270,7 +270,7 @@ async def update_group_user(request: web.Request): @permission_required("groups.*") @_handle_groups_exceptions async def delete_group_user(request: web.Request): - req_ctx = _GroupsRequestContext.parse_obj(request) + req_ctx = _GroupsRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(_GroupUserPathParams, request) await api.delete_user_in_group( request.app, req_ctx.user_id, path_params.gid, path_params.uid diff --git a/services/web/server/src/simcore_service_webserver/login/_registration.py b/services/web/server/src/simcore_service_webserver/login/_registration.py index 322dbb026c4..10089caae6f 100644 --- a/services/web/server/src/simcore_service_webserver/login/_registration.py +++ b/services/web/server/src/simcore_service_webserver/login/_registration.py @@ -276,7 +276,7 @@ async def check_and_consume_invitation( # database-type invitations if confirmation_token := await validate_confirmation_code(invitation_code, db, cfg): try: - invitation_data: InvitationData = _InvitationValidator.parse_obj( + invitation_data: InvitationData = _InvitationValidator.model_validate( confirmation_token ).data return invitation_data diff --git a/services/web/server/src/simcore_service_webserver/login/_registration_handlers.py b/services/web/server/src/simcore_service_webserver/login/_registration_handlers.py index 2ad608e3c12..26518312d7b 100644 --- a/services/web/server/src/simcore_service_webserver/login/_registration_handlers.py +++ b/services/web/server/src/simcore_service_webserver/login/_registration_handlers.py @@ -97,7 +97,7 @@ class _AuthenticatedContext(BaseModel): @login_required @permission_required("user.profile.delete") async def unregister_account(request: web.Request): - req_ctx = _AuthenticatedContext.parse_obj(request) + req_ctx = _AuthenticatedContext.model_validate(request) body = await parse_request_body_as(UnregisterCheck, request) product: Product = get_current_product(request) diff --git a/services/web/server/src/simcore_service_webserver/long_running_tasks.py b/services/web/server/src/simcore_service_webserver/long_running_tasks.py index a7e4e8c725b..2f42f18927e 100644 --- a/services/web/server/src/simcore_service_webserver/long_running_tasks.py +++ b/services/web/server/src/simcore_service_webserver/long_running_tasks.py @@ -28,7 +28,7 @@ async def _test_task_context_decorator( request: web.Request, ) -> web.StreamResponse: """this task context callback tries to get the user_id from the query if available""" - req_ctx = _RequestContext.parse_obj(request) + req_ctx = _RequestContext.model_validate(request) request[RQT_LONG_RUNNING_TASKS_CONTEXT_KEY] = jsonable_encoder(req_ctx) return await handler(request) diff --git a/services/web/server/src/simcore_service_webserver/meta_modeling/_handlers.py b/services/web/server/src/simcore_service_webserver/meta_modeling/_handlers.py index 35244dc5363..91ca21e076c 100644 --- a/services/web/server/src/simcore_service_webserver/meta_modeling/_handlers.py +++ b/services/web/server/src/simcore_service_webserver/meta_modeling/_handlers.py @@ -292,7 +292,7 @@ async def list_project_iterations(request: web.Request) -> 
web.Response: for item in iterations_range.items ] - page = Page[ProjectIterationItem].parse_obj( + page = Page[ProjectIterationItem].model_validate( paginate_data( chunk=page_items, request_url=request.url, @@ -395,7 +395,7 @@ def _get_project_results(project_id) -> ExtractedResults: for item in iterations_range.items ] - page = Page[ProjectIterationResultItem].parse_obj( + page = Page[ProjectIterationResultItem].model_validate( paginate_data( chunk=page_items, request_url=request.url, diff --git a/services/web/server/src/simcore_service_webserver/meta_modeling/_iterations.py b/services/web/server/src/simcore_service_webserver/meta_modeling/_iterations.py index 05bb9ac778b..f3fa8debb12 100644 --- a/services/web/server/src/simcore_service_webserver/meta_modeling/_iterations.py +++ b/services/web/server/src/simcore_service_webserver/meta_modeling/_iterations.py @@ -155,7 +155,7 @@ def from_tag_name( ) -> Optional["ProjectIteration"]: """Parses iteration info from tag name""" try: - return cls.parse_obj(parse_iteration_tag_name(tag_name)) + return cls.model_validate(parse_iteration_tag_name(tag_name)) except ValidationError as err: if return_none_if_fails: _logger.debug("%s", f"{err=}") @@ -216,7 +216,7 @@ async def get_or_create_runnable_projects( raise web.HTTPForbidden(reason="Unauthenticated request") from err project_nodes: dict[NodeID, Node] = { - nid: Node.parse_obj(n) for nid, n in project["workbench"].items() + nid: Node.model_validate(n) for nid, n in project["workbench"].items() } # init returns @@ -323,7 +323,7 @@ async def get_runnable_projects_ids( project: ProjectDict = await vc_repo.get_project(str(project_uuid)) assert project["uuid"] == str(project_uuid) # nosec project_nodes: dict[NodeID, Node] = { - nid: Node.parse_obj(n) for nid, n in project["workbench"].items() + nid: Node.model_validate(n) for nid, n in project["workbench"].items() } # init returns diff --git a/services/web/server/src/simcore_service_webserver/payments/_methods_api.py b/services/web/server/src/simcore_service_webserver/payments/_methods_api.py index a1eac2b440d..bf6c4a32a0b 100644 --- a/services/web/server/src/simcore_service_webserver/payments/_methods_api.py +++ b/services/web/server/src/simcore_service_webserver/payments/_methods_api.py @@ -56,7 +56,7 @@ def _to_api_model( ) -> PaymentMethodGet: assert entry.completed_at # nosec - return PaymentMethodGet.parse_obj( + return PaymentMethodGet.model_validate( { **payment_method_details_from_gateway, "idr": entry.payment_method_id, diff --git a/services/web/server/src/simcore_service_webserver/payments/_onetime_api.py b/services/web/server/src/simcore_service_webserver/payments/_onetime_api.py index f1c9f9df733..655af71fa7a 100644 --- a/services/web/server/src/simcore_service_webserver/payments/_onetime_api.py +++ b/services/web/server/src/simcore_service_webserver/payments/_onetime_api.py @@ -62,7 +62,7 @@ def _to_api_model( if transaction.invoice_url: data["invoice_url"] = transaction.invoice_url - return PaymentTransaction.parse_obj(data) + return PaymentTransaction.model_validate(data) @log_decorator(_logger, level=logging.INFO) diff --git a/services/web/server/src/simcore_service_webserver/products/_handlers.py b/services/web/server/src/simcore_service_webserver/products/_handlers.py index bfdabef6d6f..a9d6b964961 100644 --- a/services/web/server/src/simcore_service_webserver/products/_handlers.py +++ b/services/web/server/src/simcore_service_webserver/products/_handlers.py @@ -36,7 +36,7 @@ class _ProductsRequestContext(RequestParams): 
@login_required @permission_required("product.price.read") async def _get_current_product_price(request: web.Request): - req_ctx = _ProductsRequestContext.parse_obj(request) + req_ctx = _ProductsRequestContext.model_validate(request) price_info = await _api.get_current_product_credit_price_info(request) credit_price = GetCreditPrice( @@ -57,7 +57,7 @@ class _ProductsRequestParams(StrictRequestParams): @login_required @permission_required("product.details.*") async def _get_product(request: web.Request): - req_ctx = _ProductsRequestContext.parse_obj(request) + req_ctx = _ProductsRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(_ProductsRequestParams, request) if path_params.product_name == "current": @@ -86,7 +86,7 @@ class _ProductTemplateParams(_ProductsRequestParams): @login_required @permission_required("product.details.*") async def update_product_template(request: web.Request): - req_ctx = _ProductsRequestContext.parse_obj(request) + req_ctx = _ProductsRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(_ProductTemplateParams, request) assert req_ctx # nosec diff --git a/services/web/server/src/simcore_service_webserver/products/_invitations_handlers.py b/services/web/server/src/simcore_service_webserver/products/_invitations_handlers.py index a300a4c43e9..4f530d3b566 100644 --- a/services/web/server/src/simcore_service_webserver/products/_invitations_handlers.py +++ b/services/web/server/src/simcore_service_webserver/products/_invitations_handlers.py @@ -35,7 +35,7 @@ class _ProductsRequestContext(RequestParams): @login_required @permission_required("product.invitations.create") async def generate_invitation(request: web.Request): - req_ctx = _ProductsRequestContext.parse_obj(request) + req_ctx = _ProductsRequestContext.model_validate(request) body = await parse_request_body_as(GenerateInvitation, request) _, user_email = await get_user_name_and_email(request.app, user_id=req_ctx.user_id) diff --git a/services/web/server/src/simcore_service_webserver/projects/_comments_handlers.py b/services/web/server/src/simcore_service_webserver/projects/_comments_handlers.py index fff41cd016c..4ee774a22a2 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_comments_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/_comments_handlers.py @@ -86,7 +86,7 @@ class Config: @permission_required("project.read") @_handle_project_comments_exceptions async def create_project_comment(request: web.Request): - req_ctx = RequestContext.parse_obj(request) + req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(_ProjectCommentsPathParams, request) body_params = await parse_request_body_as(_ProjectCommentsBodyParams, request) @@ -128,7 +128,7 @@ class Config: @permission_required("project.read") @_handle_project_comments_exceptions async def list_project_comments(request: web.Request): - req_ctx = RequestContext.parse_obj(request) + req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(_ProjectCommentsPathParams, request) query_params: _ListProjectCommentsQueryParams = parse_request_query_parameters_as( _ListProjectCommentsQueryParams, request @@ -154,7 +154,7 @@ async def list_project_comments(request: web.Request): limit=query_params.limit, ) - page = Page[dict[str, Any]].parse_obj( + page = Page[dict[str, Any]].model_validate( paginate_data( chunk=project_comments, request_url=request.url, @@ -176,7 +176,7 @@ 
async def list_project_comments(request: web.Request): @login_required @permission_required("project.read") async def update_project_comment(request: web.Request): - req_ctx = RequestContext.parse_obj(request) + req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as( _ProjectCommentsWithCommentPathParams, request ) @@ -206,7 +206,7 @@ async def update_project_comment(request: web.Request): @permission_required("project.read") @_handle_project_comments_exceptions async def delete_project_comment(request: web.Request): - req_ctx = RequestContext.parse_obj(request) + req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as( _ProjectCommentsWithCommentPathParams, request ) @@ -234,7 +234,7 @@ async def delete_project_comment(request: web.Request): @permission_required("project.read") @_handle_project_comments_exceptions async def get_project_comment(request: web.Request): - req_ctx = RequestContext.parse_obj(request) + req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as( _ProjectCommentsWithCommentPathParams, request ) diff --git a/services/web/server/src/simcore_service_webserver/projects/_crud_api_create.py b/services/web/server/src/simcore_service_webserver/projects/_crud_api_create.py index 1d6846911d2..3be4c48051e 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_crud_api_create.py +++ b/services/web/server/src/simcore_service_webserver/projects/_crud_api_create.py @@ -407,7 +407,7 @@ async def create_project( # pylint: disable=too-many-arguments,too-many-branche } # Ensures is like ProjectGet - data = ProjectGet.parse_obj(new_project).data(exclude_unset=True) + data = ProjectGet.model_validate(new_project).data(exclude_unset=True) raise web.HTTPCreated( text=json_dumps({"data": data}), diff --git a/services/web/server/src/simcore_service_webserver/projects/_crud_api_read.py b/services/web/server/src/simcore_service_webserver/projects/_crud_api_read.py index 7efd1dd600c..445f9b72a33 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_crud_api_read.py +++ b/services/web/server/src/simcore_service_webserver/projects/_crud_api_read.py @@ -58,7 +58,7 @@ async def _append_fields( } # validate - return model_schema_cls.parse_obj(project).data(exclude_unset=True) + return model_schema_cls.model_validate(project).data(exclude_unset=True) async def list_projects( # pylint: disable=too-many-arguments diff --git a/services/web/server/src/simcore_service_webserver/projects/_crud_handlers.py b/services/web/server/src/simcore_service_webserver/projects/_crud_handlers.py index 9f138ebbb97..8b3a5056963 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_crud_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/_crud_handlers.py @@ -125,7 +125,7 @@ async def _wrapper(request: web.Request) -> web.StreamResponse: @permission_required("project.create") @permission_required("services.pipeline.*") # due to update_pipeline_db async def create_project(request: web.Request): - req_ctx = RequestContext.parse_obj(request) + req_ctx = RequestContext.model_validate(request) query_params: ProjectCreateParams = parse_request_query_parameters_as( ProjectCreateParams, request ) @@ -192,7 +192,7 @@ async def list_projects(request: web.Request): web.HTTPUnprocessableEntity: (422) if validation of request parameters fail """ - req_ctx = RequestContext.parse_obj(request) + req_ctx = 
RequestContext.model_validate(request) query_params: ProjectListWithJsonStrParams = parse_request_query_parameters_as( ProjectListWithJsonStrParams, request ) @@ -211,7 +211,7 @@ async def list_projects(request: web.Request): workspace_id=query_params.workspace_id, ) - page = Page[ProjectDict].parse_obj( + page = Page[ProjectDict].model_validate( paginate_data( chunk=projects, request_url=request.url, @@ -242,7 +242,7 @@ async def get_active_project(request: web.Request) -> web.Response: web.HTTPUnprocessableEntity: (422) if validation of request parameters fail web.HTTPNotFound: If active project is not found """ - req_ctx = RequestContext.parse_obj(request) + req_ctx = RequestContext.model_validate(request) query_params: ProjectActiveParams = parse_request_query_parameters_as( ProjectActiveParams, request ) @@ -267,7 +267,7 @@ async def get_active_project(request: web.Request) -> web.Response: # updates project's permalink field await update_or_pop_permalink_in_project(request, project) - data = ProjectGet.parse_obj(project).data(exclude_unset=True) + data = ProjectGet.model_validate(project).data(exclude_unset=True) return web.json_response({"data": data}, dumps=json_dumps) @@ -288,7 +288,7 @@ async def get_project(request: web.Request): web.HTTPNotFound: This project was not found """ - req_ctx = RequestContext.parse_obj(request) + req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) user_available_services: list[dict] = await get_services_for_user_in_product( @@ -323,7 +323,7 @@ async def get_project(request: web.Request): # Adds permalink await update_or_pop_permalink_in_project(request, project) - data = ProjectGet.parse_obj(project).data(exclude_unset=True) + data = ProjectGet.model_validate(project).data(exclude_unset=True) return web.json_response({"data": data}, dumps=json_dumps) except ProjectInvalidRightsError as exc: @@ -381,7 +381,7 @@ async def replace_project(request: web.Request): """ db: ProjectDBAPI = ProjectDBAPI.get_from_app_context(request.app) - req_ctx = RequestContext.parse_obj(request) + req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) try: @@ -407,7 +407,7 @@ async def replace_project(request: web.Request): ) try: - Project.parse_obj(new_project) # validate + Project.model_validate(new_project) # validate current_project = await projects_api.get_project_for_user( request.app, @@ -510,7 +510,7 @@ async def replace_project(request: web.Request): @permission_required("services.pipeline.*") @_handle_projects_exceptions async def patch_project(request: web.Request): - req_ctx = RequestContext.parse_obj(request) + req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) project_patch = await parse_request_body_as(ProjectPatch, request) @@ -547,7 +547,7 @@ async def delete_project(request: web.Request): web.HTTPNoContent: Success """ - req_ctx = RequestContext.parse_obj(request) + req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) try: @@ -622,7 +622,7 @@ async def delete_project(request: web.Request): @permission_required("project.create") @permission_required("services.pipeline.*") # due to update_pipeline_db async def clone_project(request: web.Request): - req_ctx = RequestContext.parse_obj(request) + req_ctx = RequestContext.model_validate(request) path_params =
parse_request_path_parameters_as(ProjectPathParams, request) return await start_long_running_task( diff --git a/services/web/server/src/simcore_service_webserver/projects/_db_utils.py b/services/web/server/src/simcore_service_webserver/projects/_db_utils.py index 14a0eae1306..b41b61af9cd 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_db_utils.py +++ b/services/web/server/src/simcore_service_webserver/projects/_db_utils.py @@ -382,7 +382,7 @@ def patch_workbench( raise ProjectInvalidUsageError # if it's a new node, let's check that it validates try: - Node.parse_obj(new_node_data) + Node.model_validate(new_node_data) patched_project["workbench"][node_key] = new_node_data changed_entries.update({node_key: new_node_data}) except ValidationError as err: diff --git a/services/web/server/src/simcore_service_webserver/projects/_folders_handlers.py b/services/web/server/src/simcore_service_webserver/projects/_folders_handlers.py index 591fecf8a94..0c5d7960a5b 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_folders_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/_folders_handlers.py @@ -59,7 +59,7 @@ class Config: @permission_required("project.folders.*") @_handle_projects_folders_exceptions async def replace_project_folder(request: web.Request): - req_ctx = RequestContext.parse_obj(request) + req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(_ProjectsFoldersPathParams, request) await _folders_api.move_project_into_folder( diff --git a/services/web/server/src/simcore_service_webserver/projects/_groups_handlers.py b/services/web/server/src/simcore_service_webserver/projects/_groups_handlers.py index 607dd499df2..97a1120e623 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_groups_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/_groups_handlers.py @@ -74,7 +74,7 @@ class Config: @permission_required("project.access_rights.update") @_handle_projects_groups_exceptions async def create_project_group(request: web.Request): - req_ctx = RequestContext.parse_obj(request) + req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(_ProjectsGroupsPathParams, request) body_params = await parse_request_body_as(_ProjectsGroupsBodyParams, request) @@ -97,7 +97,7 @@ async def create_project_group(request: web.Request): @permission_required("project.read") @_handle_projects_groups_exceptions async def list_project_groups(request: web.Request): - req_ctx = RequestContext.parse_obj(request) + req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) project_groups: list[ @@ -120,7 +120,7 @@ async def list_project_groups(request: web.Request): @permission_required("project.access_rights.update") @_handle_projects_groups_exceptions async def replace_project_group(request: web.Request): - req_ctx = RequestContext.parse_obj(request) + req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(_ProjectsGroupsPathParams, request) body_params = await parse_request_body_as(_ProjectsGroupsBodyParams, request) @@ -144,7 +144,7 @@ async def replace_project_group(request: web.Request): @permission_required("project.access_rights.update") @_handle_projects_groups_exceptions async def delete_project_group(request: web.Request): - req_ctx = RequestContext.parse_obj(request) + req_ctx = 
RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(_ProjectsGroupsPathParams, request) await _groups_api.delete_project_group( diff --git a/services/web/server/src/simcore_service_webserver/projects/_metadata_handlers.py b/services/web/server/src/simcore_service_webserver/projects/_metadata_handlers.py index 614d0ba03b9..802c13f7937 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_metadata_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/_metadata_handlers.py @@ -79,7 +79,7 @@ async def wrapper(request: web.Request) -> web.StreamResponse: @permission_required("project.read") @_handle_project_exceptions async def get_project_metadata(request: web.Request) -> web.Response: - req_ctx = RequestContext.parse_obj(request) + req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) custom_metadata = await _metadata_api.get_project_custom_metadata( @@ -99,7 +99,7 @@ async def get_project_metadata(request: web.Request) -> web.Response: @permission_required("project.update") @_handle_project_exceptions async def update_project_metadata(request: web.Request) -> web.Response: - req_ctx = RequestContext.parse_obj(request) + req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) update = await parse_request_body_as(ProjectMetadataUpdate, request) diff --git a/services/web/server/src/simcore_service_webserver/projects/_nodes_handlers.py b/services/web/server/src/simcore_service_webserver/projects/_nodes_handlers.py index 58b06af19e4..965386ea211 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_nodes_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/_nodes_handlers.py @@ -137,7 +137,7 @@ class NodePathParams(ProjectPathParams): @permission_required("project.node.create") @_handle_project_nodes_exceptions async def create_node(request: web.Request) -> web.Response: - req_ctx = RequestContext.parse_obj(request) + req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) body = await parse_request_body_as(NodeCreate, request) @@ -180,7 +180,7 @@ async def create_node(request: web.Request) -> web.Response: @_handle_project_nodes_exceptions # NOTE: Careful, this endpoint is actually "get_node_state," and it doesn't return a Node resource. 
async def get_node(request: web.Request) -> web.Response: - req_ctx = RequestContext.parse_obj(request) + req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(NodePathParams, request) # ensure the project exists @@ -222,7 +222,7 @@ async def get_node(request: web.Request) -> web.Response: @permission_required("project.node.update") @_handle_project_nodes_exceptions async def patch_project_node(request: web.Request) -> web.Response: - req_ctx = RequestContext.parse_obj(request) + req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(NodePathParams, request) node_patch = await parse_request_body_as(NodePatch, request) @@ -243,7 +243,7 @@ async def patch_project_node(request: web.Request) -> web.Response: @permission_required("project.node.delete") @_handle_project_nodes_exceptions async def delete_node(request: web.Request) -> web.Response: - req_ctx = RequestContext.parse_obj(request) + req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(NodePathParams, request) # ensure the project exists @@ -291,7 +291,7 @@ async def retrieve_node(request: web.Request) -> web.Response: @permission_required("project.node.update") @_handle_project_nodes_exceptions async def update_node_outputs(request: web.Request) -> web.Response: - req_ctx = RequestContext.parse_obj(request) + req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(NodePathParams, request) node_outputs = await parse_request_body_as(NodeOutputs, request) @@ -319,7 +319,7 @@ async def update_node_outputs(request: web.Request) -> web.Response: @_handle_project_nodes_exceptions async def start_node(request: web.Request) -> web.Response: """Has only effect on nodes associated to dynamic services""" - req_ctx = RequestContext.parse_obj(request) + req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(NodePathParams, request) await projects_api.start_project_node( @@ -363,7 +363,7 @@ async def _stop_dynamic_service_task( @_handle_project_nodes_exceptions async def stop_node(request: web.Request) -> web.Response: """Has only effect on nodes associated to dynamic services""" - req_ctx = RequestContext.parse_obj(request) + req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(NodePathParams, request) save_state = await has_user_project_access_rights( @@ -426,7 +426,7 @@ async def restart_node(request: web.Request) -> web.Response: @permission_required("project.node.read") @_handle_project_nodes_exceptions async def get_node_resources(request: web.Request) -> web.Response: - req_ctx = RequestContext.parse_obj(request) + req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(NodePathParams, request) # ensure the project exists @@ -459,7 +459,7 @@ async def get_node_resources(request: web.Request) -> web.Response: @permission_required("project.node.update") @_handle_project_nodes_exceptions async def replace_node_resources(request: web.Request) -> web.Response: - req_ctx = RequestContext.parse_obj(request) + req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(NodePathParams, request) body = await parse_request_body_as(ServiceResourcesDict, request) @@ -520,7 +520,7 @@ class _ProjectGroupAccess(BaseModel): async def get_project_services_access_for_gid( request: web.Request, ) -> web.Response: - req_ctx = 
RequestContext.parse_obj(request) + req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) query_params: _ServicesAccessQuery = parse_request_query_parameters_as( _ServicesAccessQuery, request @@ -636,7 +636,7 @@ class _ProjectNodePreview(BaseModel): @permission_required("project.read") @_handle_project_nodes_exceptions async def list_project_nodes_previews(request: web.Request) -> web.Response: - req_ctx = RequestContext.parse_obj(request) + req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) assert req_ctx # nosec @@ -646,7 +646,7 @@ async def list_project_nodes_previews(request: web.Request) -> web.Response: project_uuid=f"{path_params.project_id}", user_id=req_ctx.user_id, ) - project = Project.parse_obj(project_data) + project = Project.model_validate(project_data) for node_id, node in project.workbench.items(): screenshots = await get_node_screenshots( @@ -676,7 +676,7 @@ async def list_project_nodes_previews(request: web.Request) -> web.Response: @permission_required("project.read") @_handle_project_nodes_exceptions async def get_project_node_preview(request: web.Request) -> web.Response: - req_ctx = RequestContext.parse_obj(request) + req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(NodePathParams, request) assert req_ctx # nosec @@ -686,7 +686,7 @@ async def get_project_node_preview(request: web.Request) -> web.Response: user_id=req_ctx.user_id, ) - project = Project.parse_obj(project_data) + project = Project.model_validate(project_data) node = project.workbench.get(NodeIDStr(path_params.node_id)) if node is None: diff --git a/services/web/server/src/simcore_service_webserver/projects/_ports_api.py b/services/web/server/src/simcore_service_webserver/projects/_ports_api.py index 95a42f32046..c31de86d4fa 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_ports_api.py +++ b/services/web/server/src/simcore_service_webserver/projects/_ports_api.py @@ -181,7 +181,7 @@ def _get_outputs_in_workbench(workbench: dict[NodeID, Node]) -> dict[NodeID, Any if port.node.inputs: try: # Every port is associated to the output of a task - port_link = _NonStrictPortLink.parse_obj( + port_link = _NonStrictPortLink.model_validate( port.node.inputs[KeyIDStr("in_1")] ) # Here we resolve which task, and which of its outputs, is associated with this port
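For reference, the rename applied throughout these hunks is the pydantic v1 to v2 validation API change that this patch series introduces. A minimal sketch of the before/after call sites, using a hypothetical ExampleModel rather than a model from this codebase:

from pydantic import BaseModel


class ExampleModel(BaseModel):  # hypothetical model, for illustration only
    name: str


payload = {"name": "demo"}

# pydantic v1 (before this series): obj = ExampleModel.parse_obj(payload)
# pydantic v2 (after): the equivalent classmethod is model_validate
obj = ExampleModel.model_validate(payload)
assert obj.name == "demo"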
diff --git a/services/web/server/src/simcore_service_webserver/projects/_ports_handlers.py b/services/web/server/src/simcore_service_webserver/projects/_ports_handlers.py index 0d2fb6f3eca..e34548a9022 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_ports_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/_ports_handlers.py @@ -103,7 +103,7 @@ async def _get_validated_workbench_model( @permission_required("project.read") @_handle_project_exceptions async def get_project_inputs(request: web.Request) -> web.Response: - req_ctx = RequestContext.parse_obj(request) + req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) assert request.app # nosec @@ -129,7 +129,7 @@ async def get_project_inputs(request: web.Request) -> web.Response: @_handle_project_exceptions async def update_project_inputs(request: web.Request) -> web.Response: db: ProjectDBAPI = ProjectDBAPI.get_from_app_context(request.app) - req_ctx = RequestContext.parse_obj(request) + req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) inputs_updates = await parse_request_body_as(list[ProjectInputUpdate], request) @@ -192,7 +192,7 @@ async def update_project_inputs(request: web.Request) -> web.Response: @permission_required("project.read") @_handle_project_exceptions async def get_project_outputs(request: web.Request) -> web.Response: - req_ctx = RequestContext.parse_obj(request) + req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) assert request.app # nosec @@ -239,7 +239,7 @@ class ProjectMetadataPortGet(BaseModel): @permission_required("project.read") @_handle_project_exceptions async def list_project_metadata_ports(request: web.Request) -> web.Response: - req_ctx = RequestContext.parse_obj(request) + req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) assert request.app # nosec diff --git a/services/web/server/src/simcore_service_webserver/projects/_projects_nodes_pricing_unit_handlers.py b/services/web/server/src/simcore_service_webserver/projects/_projects_nodes_pricing_unit_handlers.py index 552869a0404..25e9ef46876 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_projects_nodes_pricing_unit_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/_projects_nodes_pricing_unit_handlers.py @@ -64,7 +64,7 @@ async def wrapper(request: web.Request) -> web.StreamResponse: @_handle_projects_nodes_pricing_unit_exceptions async def get_project_node_pricing_unit(request: web.Request): db: ProjectDBAPI = ProjectDBAPI.get_from_app_context(request.app) - req_ctx = RequestContext.parse_obj(request) + req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(NodePathParams, request) # ensure the project exists @@ -113,7 +113,7 @@ class Config: @_handle_projects_nodes_pricing_unit_exceptions async def connect_pricing_unit_to_project_node(request: web.Request): db: ProjectDBAPI = ProjectDBAPI.get_from_app_context(request.app) - req_ctx = RequestContext.parse_obj(request) + req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as( _ProjectNodePricingUnitPathParams, request ) diff --git a/services/web/server/src/simcore_service_webserver/projects/_states_handlers.py 
b/services/web/server/src/simcore_service_webserver/projects/_states_handlers.py index cc0e5b7ef57..41285dec559 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_states_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/_states_handlers.py @@ -86,7 +86,7 @@ class _OpenProjectQuery(BaseModel): @permission_required("project.open") @_handle_project_exceptions async def open_project(request: web.Request) -> web.Response: - req_ctx = RequestContext.parse_obj(request) + req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) query_params: _OpenProjectQuery = parse_request_query_parameters_as( _OpenProjectQuery, request @@ -189,7 +189,7 @@ async def open_project(request: web.Request) -> web.Response: @permission_required("project.close") @_handle_project_exceptions async def close_project(request: web.Request) -> web.Response: - req_ctx = RequestContext.parse_obj(request) + req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) try: @@ -227,7 +227,7 @@ async def close_project(request: web.Request) -> web.Response: @login_required @permission_required("project.read") async def get_project_state(request: web.Request) -> web.Response: - req_ctx = RequestContext.parse_obj(request) + req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) # check that project exists and queries state diff --git a/services/web/server/src/simcore_service_webserver/projects/_wallets_handlers.py b/services/web/server/src/simcore_service_webserver/projects/_wallets_handlers.py index 04c6fd3f218..dfa85fdb8e1 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_wallets_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/_wallets_handlers.py @@ -49,7 +49,7 @@ async def wrapper(request: web.Request) -> web.StreamResponse: @permission_required("project.wallet.*") @_handle_project_wallet_exceptions async def get_project_wallet(request: web.Request): - req_ctx = RequestContext.parse_obj(request) + req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) # ensure the project exists @@ -82,7 +82,7 @@ class Config: @permission_required("project.wallet.*") @_handle_project_wallet_exceptions async def connect_wallet_to_project(request: web.Request): - req_ctx = RequestContext.parse_obj(request) + req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(_ProjectWalletPathParams, request) # ensure the project exists diff --git a/services/web/server/src/simcore_service_webserver/projects/_workspaces_handlers.py b/services/web/server/src/simcore_service_webserver/projects/_workspaces_handlers.py index e262ce8dc29..667c5159689 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_workspaces_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/_workspaces_handlers.py @@ -69,7 +69,7 @@ class Config: @permission_required("project.workspaces.*") @_handle_projects_workspaces_exceptions async def replace_project_workspace(request: web.Request): - req_ctx = RequestContext.parse_obj(request) + req_ctx = RequestContext.model_validate(request) path_params = parse_request_path_parameters_as( _ProjectWorkspacesPathParams, request ) diff --git 
a/services/web/server/src/simcore_service_webserver/projects/projects_api.py b/services/web/server/src/simcore_service_webserver/projects/projects_api.py index fc130436caf..28fe89b1110 100644 --- a/services/web/server/src/simcore_service_webserver/projects/projects_api.py +++ b/services/web/server/src/simcore_service_webserver/projects/projects_api.py @@ -211,7 +211,7 @@ async def get_project_for_user( gid: access.dict() for gid, access in workspace_db.access_rights.items() } - Project.parse_obj(project) # NOTE: only validates + Project.model_validate(project) # NOTE: only validates return project @@ -790,7 +790,7 @@ async def add_project_node( ProjectNodeCreate( node_id=node_uuid, required_resources=jsonable_encoder(default_resources) ), - Node.parse_obj( + Node.model_validate( { "key": service_key, "version": service_version, diff --git a/services/web/server/src/simcore_service_webserver/resource_usage/_pricing_plans_admin_handlers.py b/services/web/server/src/simcore_service_webserver/resource_usage/_pricing_plans_admin_handlers.py index b71317b1aab..edfcea623d0 100644 --- a/services/web/server/src/simcore_service_webserver/resource_usage/_pricing_plans_admin_handlers.py +++ b/services/web/server/src/simcore_service_webserver/resource_usage/_pricing_plans_admin_handlers.py @@ -85,7 +85,7 @@ class Config: @permission_required("resource-usage.write") @_handle_pricing_plan_admin_exceptions async def list_pricing_plans(request: web.Request): - req_ctx = _RequestContext.parse_obj(request) + req_ctx = _RequestContext.model_validate(request) pricing_plans_list = await admin_api.list_pricing_plans( app=request.app, @@ -116,7 +116,7 @@ async def list_pricing_plans(request: web.Request): @permission_required("resource-usage.write") @_handle_pricing_plan_admin_exceptions async def get_pricing_plan(request: web.Request): - req_ctx = _RequestContext.parse_obj(request) + req_ctx = _RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(_GetPricingPlanPathParams, request) pricing_plan_get = await admin_api.get_pricing_plan( @@ -159,7 +159,7 @@ async def get_pricing_plan(request: web.Request): @permission_required("resource-usage.write") @_handle_pricing_plan_admin_exceptions async def create_pricing_plan(request: web.Request): - req_ctx = _RequestContext.parse_obj(request) + req_ctx = _RequestContext.model_validate(request) body_params = await parse_request_body_as(CreatePricingPlanBodyParams, request) _data = PricingPlanCreate( @@ -208,7 +208,7 @@ async def create_pricing_plan(request: web.Request): @permission_required("resource-usage.write") @_handle_pricing_plan_admin_exceptions async def update_pricing_plan(request: web.Request): - req_ctx = _RequestContext.parse_obj(request) + req_ctx = _RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(_GetPricingPlanPathParams, request) body_params = await parse_request_body_as(UpdatePricingPlanBodyParams, request) @@ -269,7 +269,7 @@ class Config: @permission_required("resource-usage.write") @_handle_pricing_plan_admin_exceptions async def get_pricing_unit(request: web.Request): - req_ctx = _RequestContext.parse_obj(request) + req_ctx = _RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(_GetPricingUnitPathParams, request) pricing_unit_get = await admin_api.get_pricing_unit( @@ -299,7 +299,7 @@ async def get_pricing_unit(request: web.Request): @permission_required("resource-usage.write") @_handle_pricing_plan_admin_exceptions async def 
create_pricing_unit(request: web.Request): - req_ctx = _RequestContext.parse_obj(request) + req_ctx = _RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(_GetPricingPlanPathParams, request) body_params = await parse_request_body_as(CreatePricingUnitBodyParams, request) @@ -338,7 +338,7 @@ async def create_pricing_unit(request: web.Request): @permission_required("resource-usage.write") @_handle_pricing_plan_admin_exceptions async def update_pricing_unit(request: web.Request): - req_ctx = _RequestContext.parse_obj(request) + req_ctx = _RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(_GetPricingUnitPathParams, request) body_params = await parse_request_body_as(UpdatePricingUnitBodyParams, request) @@ -380,7 +380,7 @@ async def update_pricing_unit(request: web.Request): @permission_required("resource-usage.write") @_handle_pricing_plan_admin_exceptions async def list_connected_services_to_pricing_plan(request: web.Request): - req_ctx = _RequestContext.parse_obj(request) + req_ctx = _RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(_GetPricingPlanPathParams, request) connected_services_list = await admin_api.list_connected_services_to_pricing_plan( @@ -409,7 +409,7 @@ async def list_connected_services_to_pricing_plan(request: web.Request): @permission_required("resource-usage.write") @_handle_pricing_plan_admin_exceptions async def connect_service_to_pricing_plan(request: web.Request): - req_ctx = _RequestContext.parse_obj(request) + req_ctx = _RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(_GetPricingPlanPathParams, request) body_params = await parse_request_body_as( ConnectServiceToPricingPlanBodyParams, request diff --git a/services/web/server/src/simcore_service_webserver/resource_usage/_pricing_plans_handlers.py b/services/web/server/src/simcore_service_webserver/resource_usage/_pricing_plans_handlers.py index 86072f00e5e..76d3466f56d 100644 --- a/services/web/server/src/simcore_service_webserver/resource_usage/_pricing_plans_handlers.py +++ b/services/web/server/src/simcore_service_webserver/resource_usage/_pricing_plans_handlers.py @@ -62,7 +62,7 @@ class Config: @permission_required("resource-usage.read") @_handle_resource_usage_exceptions async def get_pricing_plan_unit(request: web.Request): - req_ctx = _RequestContext.parse_obj(request) + req_ctx = _RequestContext.model_validate(request) path_params = parse_request_path_parameters_as( _GetPricingPlanUnitPathParams, request ) diff --git a/services/web/server/src/simcore_service_webserver/resource_usage/_service_runs_handlers.py b/services/web/server/src/simcore_service_webserver/resource_usage/_service_runs_handlers.py index f265e45faf1..227c941bd38 100644 --- a/services/web/server/src/simcore_service_webserver/resource_usage/_service_runs_handlers.py +++ b/services/web/server/src/simcore_service_webserver/resource_usage/_service_runs_handlers.py @@ -155,7 +155,7 @@ class Config: @permission_required("resource-usage.read") @_handle_resource_usage_exceptions async def list_resource_usage_services(request: web.Request): - req_ctx = _RequestContext.parse_obj(request) + req_ctx = _RequestContext.model_validate(request) query_params: _ListServicesResourceUsagesQueryParamsWithPagination = ( parse_request_query_parameters_as( _ListServicesResourceUsagesQueryParamsWithPagination, request @@ -173,7 +173,7 @@ async def list_resource_usage_services(request: web.Request): 
filters=parse_obj_as(ServiceResourceUsagesFilters | None, query_params.filters), # type: ignore[arg-type] # from pydantic v2 --> https://github.com/pydantic/pydantic/discussions/4950 ) - page = Page[dict[str, Any]].parse_obj( + page = Page[dict[str, Any]].model_validate( paginate_data( chunk=services.items, request_url=request.url, @@ -196,7 +196,7 @@ async def list_resource_usage_services(request: web.Request): @permission_required("resource-usage.read") @_handle_resource_usage_exceptions async def list_osparc_credits_aggregated_usages(request: web.Request): - req_ctx = _RequestContext.parse_obj(request) + req_ctx = _RequestContext.model_validate(request) query_params: _ListServicesAggregatedUsagesQueryParams = ( parse_request_query_parameters_as( _ListServicesAggregatedUsagesQueryParams, request @@ -216,7 +216,7 @@ async def list_osparc_credits_aggregated_usages(request: web.Request): ) ) - page = Page[dict[str, Any]].parse_obj( + page = Page[dict[str, Any]].model_validate( paginate_data( chunk=aggregated_services.items, request_url=request.url, @@ -236,7 +236,7 @@ async def list_osparc_credits_aggregated_usages(request: web.Request): @permission_required("resource-usage.read") @_handle_resource_usage_exceptions async def export_resource_usage_services(request: web.Request): - req_ctx = _RequestContext.parse_obj(request) + req_ctx = _RequestContext.model_validate(request) query_params: _ListServicesResourceUsagesQueryParams = ( parse_request_query_parameters_as( _ListServicesResourceUsagesQueryParams, request diff --git a/services/web/server/src/simcore_service_webserver/scicrunch/_resolver.py b/services/web/server/src/simcore_service_webserver/scicrunch/_resolver.py index efef7f77668..07d62498230 100644 --- a/services/web/server/src/simcore_service_webserver/scicrunch/_resolver.py +++ b/services/web/server/src/simcore_service_webserver/scicrunch/_resolver.py @@ -93,7 +93,7 @@ async def resolve_rrid( body = await resp.json() # process and simplify response - resolved = ResolverResponseBody.parse_obj(body) + resolved = ResolverResponseBody.model_validate(body) if resolved.hits.total == 0: return [] @@ -113,7 +113,7 @@ async def resolve_rrid( items = [] for hit in resolved.hits.hits: try: - items.append(ResolvedItem.parse_obj(hit.source.flatten_dict())) + items.append(ResolvedItem.model_validate(hit.source.flatten_dict())) except ValidationError as err: logger.warning("Skipping unexpected response %s: %s", url, err) diff --git a/services/web/server/src/simcore_service_webserver/scicrunch/_rest.py b/services/web/server/src/simcore_service_webserver/scicrunch/_rest.py index 70e4963fc68..3f6285766b8 100644 --- a/services/web/server/src/simcore_service_webserver/scicrunch/_rest.py +++ b/services/web/server/src/simcore_service_webserver/scicrunch/_rest.py @@ -120,4 +120,4 @@ async def autocomplete_by_name( ) as resp: body = await resp.json() assert body.get("success") # nosec - return ListOfResourceHits.parse_obj(body.get("data", [])) + return ListOfResourceHits.model_validate(body.get("data", [])) diff --git a/services/web/server/src/simcore_service_webserver/storage/_handlers.py b/services/web/server/src/simcore_service_webserver/storage/_handlers.py index f5acb0171b1..ec07a27d449 100644 --- a/services/web/server/src/simcore_service_webserver/storage/_handlers.py +++ b/services/web/server/src/simcore_service_webserver/storage/_handlers.py @@ -237,7 +237,7 @@ class _QueryParams(BaseModel): payload, status = await _forward_request_to_storage(request, "PUT", body=None) data, _ = 
unwrap_envelope(payload) - file_upload_schema = FileUploadSchema.parse_obj(data) + file_upload_schema = FileUploadSchema.model_validate(data) file_upload_schema.links.complete_upload = _from_storage_url( request, file_upload_schema.links.complete_upload ) @@ -265,7 +265,7 @@ class _PathParams(BaseModel): request, "POST", body=body_item.dict() ) data, _ = unwrap_envelope(payload) - file_upload_complete = FileUploadCompleteResponse.parse_obj(data) + file_upload_complete = FileUploadCompleteResponse.model_validate(data) file_upload_complete.links.state = _from_storage_url( request, file_upload_complete.links.state ) diff --git a/services/web/server/src/simcore_service_webserver/storage/api.py b/services/web/server/src/simcore_service_webserver/storage/api.py index 2ddf66d8907..cc234f02530 100644 --- a/services/web/server/src/simcore_service_webserver/storage/api.py +++ b/services/web/server/src/simcore_service_webserver/storage/api.py @@ -56,7 +56,7 @@ async def get_storage_locations( locations_url = (api_endpoint / "locations").with_query(user_id=user_id) async with session.get(f"{locations_url}") as response: response.raise_for_status() - locations_enveloped = Envelope[FileLocationArray].parse_obj( + locations_enveloped = Envelope[FileLocationArray].model_validate( await response.json() ) assert locations_enveloped.data # nosec @@ -89,9 +89,9 @@ async def get_project_total_size_simcore_s3( ).with_query(user_id=user_id, project_id=f"{project_uuid}") async with session.get(f"{files_metadata_url}") as response: response.raise_for_status() - list_of_files_enveloped = Envelope[list[FileMetaDataGet]].parse_obj( - await response.json() - ) + list_of_files_enveloped = Envelope[ + list[FileMetaDataGet] + ].model_validate(await response.json()) assert list_of_files_enveloped.data is not None # nosec project_size_bytes += sum( file_metadata.file_size @@ -204,7 +204,7 @@ async def get_download_link( async with session.get(f"{url}") as response: response.raise_for_status() download: PresignedLink | None = ( - Envelope[PresignedLink].parse_obj(await response.json()).data + Envelope[PresignedLink].model_validate(await response.json()).data ) assert download is not None # nosec link: HttpUrl = parse_obj_as(HttpUrl, download.link) @@ -227,7 +227,7 @@ async def get_files_in_node_folder( async with session.get(f"{files_metadata_url}") as response: response.raise_for_status() - list_of_files_enveloped = Envelope[list[FileMetaDataGet]].parse_obj( + list_of_files_enveloped = Envelope[list[FileMetaDataGet]].model_validate( await response.json() ) assert list_of_files_enveloped.data is not None # nosec diff --git a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_projects.py b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_projects.py index e4b71213ee6..af649fec1ea 100644 --- a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_projects.py +++ b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_projects.py @@ -60,7 +60,7 @@ def _create_file_picker(download_link: str, output_label: str | None): data["label"] = Path(output_label).name elif url.path: data["label"] = Path(url.path).name - output = DownloadLink.parse_obj(data) + output = DownloadLink.model_validate(data) output_id = "outFile" node = Node( diff --git a/services/web/server/src/simcore_service_webserver/tags/_handlers.py b/services/web/server/src/simcore_service_webserver/tags/_handlers.py index de0fc7dd5b1..f04e0fc2873 100644 --- 
a/services/web/server/src/simcore_service_webserver/tags/_handlers.py +++ b/services/web/server/src/simcore_service_webserver/tags/_handlers.py @@ -56,7 +56,7 @@ async def wrapper(request: web.Request) -> web.StreamResponse: @_handle_tags_exceptions async def create_tag(request: web.Request): engine: Engine = request.app[APP_DB_ENGINE_KEY] - req_ctx = TagRequestContext.parse_obj(request) + req_ctx = TagRequestContext.model_validate(request) new_tag = await parse_request_body_as(TagCreate, request) repo = TagsRepo(user_id=req_ctx.user_id) @@ -78,7 +78,7 @@ async def create_tag(request: web.Request): @_handle_tags_exceptions async def list_tags(request: web.Request): engine: Engine = request.app[APP_DB_ENGINE_KEY] - req_ctx = TagRequestContext.parse_obj(request) + req_ctx = TagRequestContext.model_validate(request) repo = TagsRepo(user_id=req_ctx.user_id) async with engine.acquire() as conn: @@ -94,7 +94,7 @@ async def list_tags(request: web.Request): @_handle_tags_exceptions async def update_tag(request: web.Request): engine: Engine = request.app[APP_DB_ENGINE_KEY] - req_ctx = TagRequestContext.parse_obj(request) + req_ctx = TagRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(TagPathParams, request) tag_updates = await parse_request_body_as(TagUpdate, request) @@ -113,7 +113,7 @@ async def update_tag(request: web.Request): @_handle_tags_exceptions async def delete_tag(request: web.Request): engine: Engine = request.app[APP_DB_ENGINE_KEY] - req_ctx = TagRequestContext.parse_obj(request) + req_ctx = TagRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(TagPathParams, request) repo = TagsRepo(user_id=req_ctx.user_id) diff --git a/services/web/server/src/simcore_service_webserver/users/_handlers.py b/services/web/server/src/simcore_service_webserver/users/_handlers.py index ede00340d4e..e20aebc83c6 100644 --- a/services/web/server/src/simcore_service_webserver/users/_handlers.py +++ b/services/web/server/src/simcore_service_webserver/users/_handlers.py @@ -50,7 +50,7 @@ async def wrapper(request: web.Request) -> web.StreamResponse: @login_required @_handle_users_exceptions async def get_my_profile(request: web.Request) -> web.Response: - req_ctx = UsersRequestContext.parse_obj(request) + req_ctx = UsersRequestContext.model_validate(request) profile: ProfileGet = await api.get_user_profile( request.app, req_ctx.user_id, req_ctx.product_name ) @@ -62,7 +62,7 @@ async def get_my_profile(request: web.Request) -> web.Response: @permission_required("user.profile.update") @_handle_users_exceptions async def update_my_profile(request: web.Request) -> web.Response: - req_ctx = UsersRequestContext.parse_obj(request) + req_ctx = UsersRequestContext.model_validate(request) profile_update = await parse_request_body_as(ProfileUpdate, request) await api.update_user_profile( request.app, req_ctx.user_id, profile_update, as_patch=False @@ -87,7 +87,7 @@ class _SearchQueryParams(BaseModel): @permission_required("user.users.*") @_handle_users_exceptions async def search_users(request: web.Request) -> web.Response: - req_ctx = UsersRequestContext.parse_obj(request) + req_ctx = UsersRequestContext.model_validate(request) assert req_ctx.product_name # nosec query_params: _SearchQueryParams = parse_request_query_parameters_as( @@ -108,7 +108,7 @@ async def search_users(request: web.Request) -> web.Response: @permission_required("user.users.*") @_handle_users_exceptions async def pre_register_user(request: web.Request) -> web.Response: - req_ctx = 
UsersRequestContext.parse_obj(request) + req_ctx = UsersRequestContext.model_validate(request) pre_user_profile = await parse_request_body_as(PreUserProfile, request) try: diff --git a/services/web/server/src/simcore_service_webserver/users/_notifications_handlers.py b/services/web/server/src/simcore_service_webserver/users/_notifications_handlers.py index b30a435210b..41f3330af52 100644 --- a/services/web/server/src/simcore_service_webserver/users/_notifications_handlers.py +++ b/services/web/server/src/simcore_service_webserver/users/_notifications_handlers.py @@ -52,7 +52,7 @@ async def _get_user_notifications( # Filter by product included = [product_name, "UNDEFINED"] filtered_notifications = [n for n in notifications if n["product"] in included] - return [UserNotification.parse_obj(x) for x in filtered_notifications] + return [UserNotification.model_validate(x) for x in filtered_notifications] @routes.get(f"/{API_VTAG}/me/notifications", name="list_user_notifications") @@ -60,7 +60,7 @@ async def _get_user_notifications( @permission_required("user.notifications.read") async def list_user_notifications(request: web.Request) -> web.Response: redis_client = get_redis_user_notifications_client(request.app) - req_ctx = UsersRequestContext.parse_obj(request) + req_ctx = UsersRequestContext.model_validate(request) product_name = get_product_name(request) notifications = await _get_user_notifications( redis_client, req_ctx.user_id, product_name @@ -99,7 +99,7 @@ class _NotificationPathParams(BaseModel): @permission_required("user.notifications.update") async def mark_notification_as_read(request: web.Request) -> web.Response: redis_client = get_redis_user_notifications_client(request.app) - req_ctx = UsersRequestContext.parse_obj(request) + req_ctx = UsersRequestContext.model_validate(request) req_path_params = parse_request_path_parameters_as(_NotificationPathParams, request) body = await parse_request_body_as(UserNotificationPatch, request) @@ -124,7 +124,7 @@ async def mark_notification_as_read(request: web.Request) -> web.Response: @login_required @permission_required("user.permissions.read") async def list_user_permissions(request: web.Request) -> web.Response: - req_ctx = UsersRequestContext.parse_obj(request) + req_ctx = UsersRequestContext.model_validate(request) list_permissions: list[Permission] = await _api.list_user_permissions( request.app, req_ctx.user_id, req_ctx.product_name ) diff --git a/services/web/server/src/simcore_service_webserver/users/_preferences_api.py b/services/web/server/src/simcore_service_webserver/users/_preferences_api.py index 8e17a4a25d4..3215b4bc149 100644 --- a/services/web/server/src/simcore_service_webserver/users/_preferences_api.py +++ b/services/web/server/src/simcore_service_webserver/users/_preferences_api.py @@ -96,7 +96,7 @@ def include_preference(identifier: PreferenceIdentifier) -> bool: return True aggregated_preferences: AggregatedPreferences = { - p.preference_identifier: Preference.parse_obj( + p.preference_identifier: Preference.model_validate( {"value": p.value, "default_value": p.get_default_value()} ) for p in await _get_frontend_user_preferences(app, user_id, product_name) diff --git a/services/web/server/src/simcore_service_webserver/users/_preferences_db.py b/services/web/server/src/simcore_service_webserver/users/_preferences_db.py index 45903403af9..e64ce5e579b 100644 --- a/services/web/server/src/simcore_service_webserver/users/_preferences_db.py +++ b/services/web/server/src/simcore_service_webserver/users/_preferences_db.py @@ 
-31,7 +31,7 @@ async def get_user_preference( return ( None if preference_payload is None - else preference_class.parse_obj(preference_payload) + else preference_class.model_validate(preference_payload) ) diff --git a/services/web/server/src/simcore_service_webserver/users/_preferences_handlers.py b/services/web/server/src/simcore_service_webserver/users/_preferences_handlers.py index 9f5513b904f..35f7b7118a9 100644 --- a/services/web/server/src/simcore_service_webserver/users/_preferences_handlers.py +++ b/services/web/server/src/simcore_service_webserver/users/_preferences_handlers.py @@ -55,7 +55,7 @@ async def wrapper(request: web.Request) -> web.StreamResponse: @login_required @_handle_users_exceptions async def set_frontend_preference(request: web.Request) -> web.Response: - req_ctx = _RequestContext.parse_obj(request) + req_ctx = _RequestContext.model_validate(request) req_body = await parse_request_body_as(PatchRequestBody, request) req_path_params = parse_request_path_parameters_as(PatchPathParams, request) diff --git a/services/web/server/src/simcore_service_webserver/users/_tokens_handlers.py b/services/web/server/src/simcore_service_webserver/users/_tokens_handlers.py index ddca5a94b2f..77f0551333d 100644 --- a/services/web/server/src/simcore_service_webserver/users/_tokens_handlers.py +++ b/services/web/server/src/simcore_service_webserver/users/_tokens_handlers.py @@ -44,7 +44,7 @@ async def _wrapper(request: web.Request) -> web.StreamResponse: @_handle_tokens_errors @permission_required("user.tokens.*") async def list_tokens(request: web.Request) -> web.Response: - req_ctx = UsersRequestContext.parse_obj(request) + req_ctx = UsersRequestContext.model_validate(request) all_tokens = await _tokens.list_tokens(request.app, req_ctx.user_id) return envelope_json_response(all_tokens) @@ -54,7 +54,7 @@ async def list_tokens(request: web.Request) -> web.Response: @_handle_tokens_errors @permission_required("user.tokens.*") async def create_token(request: web.Request) -> web.Response: - req_ctx = UsersRequestContext.parse_obj(request) + req_ctx = UsersRequestContext.model_validate(request) token_create = await parse_request_body_as(TokenCreate, request) await _tokens.create_token(request.app, req_ctx.user_id, token_create) return envelope_json_response(token_create, web.HTTPCreated) @@ -69,7 +69,7 @@ class _TokenPathParams(BaseModel): @_handle_tokens_errors @permission_required("user.tokens.*") async def get_token(request: web.Request) -> web.Response: - req_ctx = UsersRequestContext.parse_obj(request) + req_ctx = UsersRequestContext.model_validate(request) req_path_params = parse_request_path_parameters_as(_TokenPathParams, request) token = await _tokens.get_token( request.app, req_ctx.user_id, req_path_params.service @@ -82,7 +82,7 @@ async def get_token(request: web.Request) -> web.Response: @_handle_tokens_errors @permission_required("user.tokens.*") async def delete_token(request: web.Request) -> web.Response: - req_ctx = UsersRequestContext.parse_obj(request) + req_ctx = UsersRequestContext.model_validate(request) req_path_params = parse_request_path_parameters_as(_TokenPathParams, request) await _tokens.delete_token(request.app, req_ctx.user_id, req_path_params.service) raise web.HTTPNoContent(content_type=MIMETYPE_APPLICATION_JSON) diff --git a/services/web/server/src/simcore_service_webserver/version_control/_core.py b/services/web/server/src/simcore_service_webserver/version_control/_core.py index 53f10829b48..c8d444339ea 100644 --- 
a/services/web/server/src/simcore_service_webserver/version_control/_core.py +++ b/services/web/server/src/simcore_service_webserver/version_control/_core.py @@ -136,7 +136,7 @@ async def get_workbench( # prefer actual project to snapshot content = await vc_repo.get_workbench_view(repo_id, commit_id) - return WorkbenchView.parse_obj(content) + return WorkbenchView.model_validate(content) # diff --git a/services/web/server/src/simcore_service_webserver/version_control/_handlers.py b/services/web/server/src/simcore_service_webserver/version_control/_handlers.py index 0cf849effb0..08791750ec0 100644 --- a/services/web/server/src/simcore_service_webserver/version_control/_handlers.py +++ b/services/web/server/src/simcore_service_webserver/version_control/_handlers.py @@ -81,7 +81,7 @@ async def _list_repos_handler(request: web.Request): # parse and validate repos_list = [ - RepoApiModel.parse_obj( + RepoApiModel.model_validate( { "url": url_for("list_repos"), **dict(row.items()), @@ -90,7 +90,7 @@ async def _list_repos_handler(request: web.Request): for row in repos_rows ] - page = Page[RepoApiModel].parse_obj( + page = Page[RepoApiModel].model_validate( paginate_data( chunk=repos_list, request_url=request.url, @@ -116,7 +116,7 @@ async def _create_checkpoint_handler(request: web.Request): vc_repo = VersionControlRepository.create_from_request(request) path_params = parse_request_path_parameters_as(_ProjectPathParam, request) - _body = CheckpointNew.parse_obj(await request.json()) + _body = CheckpointNew.model_validate(await request.json()) checkpoint: Checkpoint = await create_checkpoint( vc_repo, @@ -124,7 +124,7 @@ async def _create_checkpoint_handler(request: web.Request): **_body.dict(include={"tag", "message"}), ) - data = CheckpointApiModel.parse_obj( + data = CheckpointApiModel.model_validate( { "url": url_for( "get_checkpoint", @@ -163,7 +163,7 @@ async def _list_checkpoints_handler(request: web.Request): # parse and validate checkpoints_list = [ - CheckpointApiModel.parse_obj( + CheckpointApiModel.model_validate( { "url": url_for( "get_checkpoint", @@ -176,7 +176,7 @@ async def _list_checkpoints_handler(request: web.Request): for checkpoint in checkpoints ] - page = Page[CheckpointApiModel].parse_obj( + page = Page[CheckpointApiModel].model_validate( paginate_data( chunk=checkpoints_list, request_url=request.url, @@ -211,7 +211,7 @@ async def _get_checkpoint_handler(request: web.Request): ref_id=path_params.ref_id, ) - data = CheckpointApiModel.parse_obj( + data = CheckpointApiModel.model_validate( { "url": url_for( "get_checkpoint", @@ -245,7 +245,7 @@ async def _update_checkpoint_annotations_handler(request: web.Request): **update.dict(include={"tag", "message"}, exclude_none=True), ) - data = CheckpointApiModel.parse_obj( + data = CheckpointApiModel.model_validate( { "url": url_for( "get_checkpoint", @@ -277,7 +277,7 @@ async def _checkout_handler(request: web.Request): ref_id=path_params.ref_id, ) - data = CheckpointApiModel.parse_obj( + data = CheckpointApiModel.model_validate( { "url": url_for( "get_checkpoint", @@ -315,7 +315,7 @@ async def _view_project_workbench_handler(request: web.Request): ref_id=checkpoint.id, ) - data = WorkbenchViewApiModel.parse_obj( + data = WorkbenchViewApiModel.model_validate( { # = request.url?? 
"url": url_for( diff --git a/services/web/server/src/simcore_service_webserver/wallets/_groups_handlers.py b/services/web/server/src/simcore_service_webserver/wallets/_groups_handlers.py index 6690d6d41e4..87109220837 100644 --- a/services/web/server/src/simcore_service_webserver/wallets/_groups_handlers.py +++ b/services/web/server/src/simcore_service_webserver/wallets/_groups_handlers.py @@ -81,7 +81,7 @@ class Config: @permission_required("wallets.*") @_handle_wallets_groups_exceptions async def create_wallet_group(request: web.Request): - req_ctx = _RequestContext.parse_obj(request) + req_ctx = _RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(_WalletsGroupsPathParams, request) body_params = await parse_request_body_as(_WalletsGroupsBodyParams, request) @@ -104,7 +104,7 @@ async def create_wallet_group(request: web.Request): @permission_required("wallets.*") @_handle_wallets_groups_exceptions async def list_wallet_groups(request: web.Request): - req_ctx = _RequestContext.parse_obj(request) + req_ctx = _RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(WalletsPathParams, request) wallets: list[ @@ -127,7 +127,7 @@ async def list_wallet_groups(request: web.Request): @permission_required("wallets.*") @_handle_wallets_groups_exceptions async def update_wallet_group(request: web.Request): - req_ctx = _RequestContext.parse_obj(request) + req_ctx = _RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(_WalletsGroupsPathParams, request) body_params = await parse_request_body_as(_WalletsGroupsBodyParams, request) @@ -151,7 +151,7 @@ async def update_wallet_group(request: web.Request): @permission_required("wallets.*") @_handle_wallets_groups_exceptions async def delete_wallet_group(request: web.Request): - req_ctx = _RequestContext.parse_obj(request) + req_ctx = _RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(_WalletsGroupsPathParams, request) await _groups_api.delete_wallet_group( diff --git a/services/web/server/src/simcore_service_webserver/wallets/_handlers.py b/services/web/server/src/simcore_service_webserver/wallets/_handlers.py index 0b43bbea59a..a8bbc7e9042 100644 --- a/services/web/server/src/simcore_service_webserver/wallets/_handlers.py +++ b/services/web/server/src/simcore_service_webserver/wallets/_handlers.py @@ -105,7 +105,7 @@ class WalletsPathParams(StrictRequestParams): @permission_required("wallets.*") @handle_wallets_exceptions async def create_wallet(request: web.Request): - req_ctx = WalletsRequestContext.parse_obj(request) + req_ctx = WalletsRequestContext.model_validate(request) body_params = await parse_request_body_as(CreateWalletBodyParams, request) wallet: WalletGet = await _api.create_wallet( @@ -125,7 +125,7 @@ async def create_wallet(request: web.Request): @permission_required("wallets.*") @handle_wallets_exceptions async def list_wallets(request: web.Request): - req_ctx = WalletsRequestContext.parse_obj(request) + req_ctx = WalletsRequestContext.model_validate(request) wallets: list[ WalletGetWithAvailableCredits @@ -141,7 +141,7 @@ async def list_wallets(request: web.Request): @permission_required("wallets.*") @handle_wallets_exceptions async def get_default_wallet(request: web.Request): - req_ctx = WalletsRequestContext.parse_obj(request) + req_ctx = WalletsRequestContext.model_validate(request) wallet: WalletGetWithAvailableCredits = ( await _api.get_user_default_wallet_with_available_credits( @@ -156,7 +156,7 @@ 
async def get_default_wallet(request: web.Request): @permission_required("wallets.*") @handle_wallets_exceptions async def get_wallet(request: web.Request): - req_ctx = WalletsRequestContext.parse_obj(request) + req_ctx = WalletsRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(WalletsPathParams, request) wallet: WalletGetWithAvailableCredits = ( @@ -179,7 +179,7 @@ async def get_wallet(request: web.Request): @permission_required("wallets.*") @handle_wallets_exceptions async def update_wallet(request: web.Request): - req_ctx = WalletsRequestContext.parse_obj(request) + req_ctx = WalletsRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(WalletsPathParams, request) body_params = await parse_request_body_as(PutWalletBodyParams, request) diff --git a/services/web/server/src/simcore_service_webserver/wallets/_payments_handlers.py b/services/web/server/src/simcore_service_webserver/wallets/_payments_handlers.py index 27060372abd..736a3411cce 100644 --- a/services/web/server/src/simcore_service_webserver/wallets/_payments_handlers.py +++ b/services/web/server/src/simcore_service_webserver/wallets/_payments_handlers.py @@ -65,7 +65,7 @@ @permission_required("wallets.*") @handle_wallets_exceptions async def _create_payment(request: web.Request): - req_ctx = WalletsRequestContext.parse_obj(request) + req_ctx = WalletsRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(WalletsPathParams, request) body_params = await parse_request_body_as(CreateWalletPayment, request) @@ -113,7 +113,7 @@ async def _list_all_payments(request: web.Request): be listed here. """ - req_ctx = WalletsRequestContext.parse_obj(request) + req_ctx = WalletsRequestContext.model_validate(request) query_params: PageQueryParameters = parse_request_query_parameters_as( PageQueryParameters, request ) @@ -126,7 +126,7 @@ async def _list_all_payments(request: web.Request): offset=query_params.offset, ) - page = Page[PaymentTransaction].parse_obj( + page = Page[PaymentTransaction].model_validate( paginate_data( chunk=payments, request_url=request.url, @@ -148,7 +148,7 @@ async def _list_all_payments(request: web.Request): @handle_wallets_exceptions async def _get_payment_invoice_link(request: web.Request): """Get invoice for concrete payment""" - req_ctx = WalletsRequestContext.parse_obj(request) + req_ctx = WalletsRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(PaymentsPathParams, request) payment_invoice = await get_payment_invoice_url( @@ -174,7 +174,7 @@ class PaymentsPathParams(WalletsPathParams): @permission_required("wallets.*") @handle_wallets_exceptions async def _cancel_payment(request: web.Request): - req_ctx = WalletsRequestContext.parse_obj(request) + req_ctx = WalletsRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(PaymentsPathParams, request) await api.cancel_payment_to_wallet( @@ -208,7 +208,7 @@ async def _init_creation_of_payment_method(request: web.Request): """Triggers the creation of a new payment method. 
Note that creating a payment-method follows the init-prompt-ack flow """ - req_ctx = WalletsRequestContext.parse_obj(request) + req_ctx = WalletsRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(WalletsPathParams, request) with log_context( @@ -241,7 +241,7 @@ async def _init_creation_of_payment_method(request: web.Request): @permission_required("wallets.*") @handle_wallets_exceptions async def _cancel_creation_of_payment_method(request: web.Request): - req_ctx = WalletsRequestContext.parse_obj(request) + req_ctx = WalletsRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(PaymentMethodsPathParams, request) with log_context( @@ -272,7 +272,7 @@ async def _cancel_creation_of_payment_method(request: web.Request): @permission_required("wallets.*") @handle_wallets_exceptions async def _list_payments_methods(request: web.Request): - req_ctx = WalletsRequestContext.parse_obj(request) + req_ctx = WalletsRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(WalletsPathParams, request) payments_methods: list[PaymentMethodGet] = await list_wallet_payment_methods( @@ -292,7 +292,7 @@ async def _list_payments_methods(request: web.Request): @permission_required("wallets.*") @handle_wallets_exceptions async def _get_payment_method(request: web.Request): - req_ctx = WalletsRequestContext.parse_obj(request) + req_ctx = WalletsRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(PaymentMethodsPathParams, request) payment_method: PaymentMethodGet = await get_wallet_payment_method( @@ -313,7 +313,7 @@ async def _get_payment_method(request: web.Request): @permission_required("wallets.*") @handle_wallets_exceptions async def _delete_payment_method(request: web.Request): - req_ctx = WalletsRequestContext.parse_obj(request) + req_ctx = WalletsRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(PaymentMethodsPathParams, request) await delete_wallet_payment_method( @@ -337,7 +337,7 @@ async def _delete_payment_method(request: web.Request): @permission_required("wallets.*") @handle_wallets_exceptions async def _pay_with_payment_method(request: web.Request): - req_ctx = WalletsRequestContext.parse_obj(request) + req_ctx = WalletsRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(PaymentMethodsPathParams, request) body_params = await parse_request_body_as(CreateWalletPayment, request) @@ -409,7 +409,7 @@ async def _notify_payment_completed_after_response(app, user_id, payment): @permission_required("wallets.*") @handle_wallets_exceptions async def _get_wallet_autorecharge(request: web.Request): - req_ctx = WalletsRequestContext.parse_obj(request) + req_ctx = WalletsRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(WalletsPathParams, request) auto_recharge = await get_wallet_payment_autorecharge( @@ -426,7 +426,7 @@ async def _get_wallet_autorecharge(request: web.Request): product_name=req_ctx.product_name, ) - return envelope_json_response(GetWalletAutoRecharge.parse_obj(auto_recharge)) + return envelope_json_response(GetWalletAutoRecharge.model_validate(auto_recharge)) @routes.put( @@ -437,7 +437,7 @@ async def _get_wallet_autorecharge(request: web.Request): @permission_required("wallets.*") @handle_wallets_exceptions async def _replace_wallet_autorecharge(request: web.Request): - req_ctx = WalletsRequestContext.parse_obj(request) + req_ctx = 
WalletsRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(WalletsPathParams, request) body_params = await parse_request_body_as(ReplaceWalletAutoRecharge, request) @@ -454,4 +454,4 @@ async def _replace_wallet_autorecharge(request: web.Request): wallet_id=path_params.wallet_id, new=body_params, ) - return envelope_json_response(GetWalletAutoRecharge.parse_obj(udpated)) + return envelope_json_response(GetWalletAutoRecharge.model_validate(udpated)) diff --git a/services/web/server/src/simcore_service_webserver/workspaces/_groups_handlers.py b/services/web/server/src/simcore_service_webserver/workspaces/_groups_handlers.py index 0bf7d09eb68..1d926fdb14a 100644 --- a/services/web/server/src/simcore_service_webserver/workspaces/_groups_handlers.py +++ b/services/web/server/src/simcore_service_webserver/workspaces/_groups_handlers.py @@ -82,7 +82,7 @@ class Config: @permission_required("workspaces.*") @_handle_workspaces_groups_exceptions async def create_workspace_group(request: web.Request): - req_ctx = _RequestContext.parse_obj(request) + req_ctx = _RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(_WorkspacesGroupsPathParams, request) body_params = await parse_request_body_as(_WorkspacesGroupsBodyParams, request) @@ -105,7 +105,7 @@ async def create_workspace_group(request: web.Request): @permission_required("workspaces.*") @_handle_workspaces_groups_exceptions async def list_workspace_groups(request: web.Request): - req_ctx = _RequestContext.parse_obj(request) + req_ctx = _RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(WorkspacesPathParams, request) workspaces: list[ @@ -128,7 +128,7 @@ async def list_workspace_groups(request: web.Request): @permission_required("workspaces.*") @_handle_workspaces_groups_exceptions async def replace_workspace_group(request: web.Request): - req_ctx = _RequestContext.parse_obj(request) + req_ctx = _RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(_WorkspacesGroupsPathParams, request) body_params = await parse_request_body_as(_WorkspacesGroupsBodyParams, request) @@ -152,7 +152,7 @@ async def replace_workspace_group(request: web.Request): @permission_required("workspaces.*") @_handle_workspaces_groups_exceptions async def delete_workspace_group(request: web.Request): - req_ctx = _RequestContext.parse_obj(request) + req_ctx = _RequestContext.model_validate(request) path_params = parse_request_path_parameters_as(_WorkspacesGroupsPathParams, request) await _groups_api.delete_workspace_group( diff --git a/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_handlers.py b/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_handlers.py index 5cc49639334..a95a97aa4d0 100644 --- a/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_handlers.py +++ b/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_handlers.py @@ -101,7 +101,7 @@ class Config: @permission_required("workspaces.*") @handle_workspaces_exceptions async def create_workspace(request: web.Request): - req_ctx = WorkspacesRequestContext.parse_obj(request) + req_ctx = WorkspacesRequestContext.model_validate(request) body_params = await parse_request_body_as(CreateWorkspaceBodyParams, request) workspace: WorkspaceGet = await _workspaces_api.create_workspace( @@ -121,7 +121,7 @@ async def create_workspace(request: web.Request): @permission_required("workspaces.*") 
@handle_workspaces_exceptions async def list_workspaces(request: web.Request): - req_ctx = WorkspacesRequestContext.parse_obj(request) + req_ctx = WorkspacesRequestContext.model_validate(request) query_params: WorkspacesListWithJsonStrQueryParams = ( parse_request_query_parameters_as(WorkspacesListWithJsonStrQueryParams, request) ) @@ -135,7 +135,7 @@ async def list_workspaces(request: web.Request): order_by=parse_obj_as(OrderBy, query_params.order_by), ) - page = Page[WorkspaceGet].parse_obj( + page = Page[WorkspaceGet].model_validate( paginate_data( chunk=workspaces.items, request_url=request.url, @@ -155,7 +155,7 @@ async def list_workspaces(request: web.Request): @permission_required("workspaces.*") @handle_workspaces_exceptions async def get_workspace(request: web.Request): - req_ctx = WorkspacesRequestContext.parse_obj(request) + req_ctx = WorkspacesRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(WorkspacesPathParams, request) workspace: WorkspaceGet = await _workspaces_api.get_workspace( @@ -176,7 +176,7 @@ async def get_workspace(request: web.Request): @permission_required("workspaces.*") @handle_workspaces_exceptions async def replace_workspace(request: web.Request): - req_ctx = WorkspacesRequestContext.parse_obj(request) + req_ctx = WorkspacesRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(WorkspacesPathParams, request) body_params = await parse_request_body_as(PutWorkspaceBodyParams, request) @@ -200,7 +200,7 @@ async def replace_workspace(request: web.Request): @permission_required("workspaces.*") @handle_workspaces_exceptions async def delete_workspace(request: web.Request): - req_ctx = WorkspacesRequestContext.parse_obj(request) + req_ctx = WorkspacesRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(WorkspacesPathParams, request) await _workspaces_api.delete_workspace( diff --git a/services/web/server/tests/conftest.py b/services/web/server/tests/conftest.py index 3f47a75afc2..1a029f47352 100644 --- a/services/web/server/tests/conftest.py +++ b/services/web/server/tests/conftest.py @@ -326,7 +326,7 @@ async def _creator( data, error = await assert_status(result, status.HTTP_200_OK) assert data assert not error - task_status = TaskStatus.parse_obj(data) + task_status = TaskStatus.model_validate(data) assert task_status print(f"<-- status: {task_status.json(indent=2)}") assert task_status.done, "task incomplete" diff --git a/services/web/server/tests/integration/02/scicrunch/test_scicrunch__rest.py b/services/web/server/tests/integration/02/scicrunch/test_scicrunch__rest.py index 014418a25fb..39e507b2fda 100644 --- a/services/web/server/tests/integration/02/scicrunch/test_scicrunch__rest.py +++ b/services/web/server/tests/integration/02/scicrunch/test_scicrunch__rest.py @@ -145,7 +145,7 @@ async def test_scicrunch_get_fields_from_invalid_rrid( async def test_scicrunch_service_autocomplete_by_name(settings: SciCrunchSettings): - expected: list[dict[str, Any]] = ListOfResourceHits.parse_obj( + expected: list[dict[str, Any]] = ListOfResourceHits.model_validate( [ { "rid": "SCR_000860", diff --git a/services/web/server/tests/unit/isolated/test_catalog_api_units.py b/services/web/server/tests/unit/isolated/test_catalog_api_units.py index 479165189d2..39d1824a775 100644 --- a/services/web/server/tests/unit/isolated/test_catalog_api_units.py +++ b/services/web/server/tests/unit/isolated/test_catalog_api_units.py @@ -45,8 +45,8 @@ def test_can_connect_enums(unit_registry: 
UnitRegistry): } assert can_connect( - from_output=ServiceOutput.parse_obj(enum_port), - to_input=ServiceInput.parse_obj(enum_port), + from_output=ServiceOutput.model_validate(enum_port), + to_input=ServiceInput.model_validate(enum_port), units_registry=unit_registry, ) @@ -71,15 +71,15 @@ def test_can_connect_generic_data_types(unit_registry: UnitRegistry): # data:*/* -> data:text/plain assert can_connect( - from_output=ServiceOutput.parse_obj(file_picker_outfile), - to_input=ServiceInput.parse_obj(input_sleeper_input_1), + from_output=ServiceOutput.model_validate(file_picker_outfile), + to_input=ServiceInput.model_validate(input_sleeper_input_1), units_registry=unit_registry, ) # data:text/plain -> data:*/* assert can_connect( - from_output=ServiceOutput.parse_obj(input_sleeper_input_1), - to_input=ServiceInput.parse_obj(file_picker_outfile), + from_output=ServiceOutput.model_validate(input_sleeper_input_1), + to_input=ServiceInput.model_validate(file_picker_outfile), units_registry=unit_registry, ) @@ -127,15 +127,15 @@ def test_can_connect_no_units_with_units( ): # w/o -> w assert can_connect( - from_output=ServiceOutput.parse_obj(port_without_unit), - to_input=ServiceInput.parse_obj(port_with_unit), + from_output=ServiceOutput.model_validate(port_without_unit), + to_input=ServiceInput.model_validate(port_with_unit), units_registry=unit_registry, ) # w -> w/o assert can_connect( - from_output=ServiceOutput.parse_obj(port_with_unit), - to_input=ServiceInput.parse_obj(port_without_unit), + from_output=ServiceOutput.model_validate(port_with_unit), + to_input=ServiceInput.model_validate(port_without_unit), units_registry=unit_registry, ) @@ -178,8 +178,8 @@ def test_units_compatible( assert ( can_connect( - from_output=ServiceOutput.parse_obj(from_port), - to_input=ServiceInput.parse_obj(to_port), + from_output=ServiceOutput.model_validate(from_port), + to_input=ServiceInput.model_validate(to_port), units_registry=unit_registry, ) == are_compatible diff --git a/services/web/server/tests/unit/isolated/test_dynamic_scheduler.py b/services/web/server/tests/unit/isolated/test_dynamic_scheduler.py index 0823f52b1b2..a770fd46f48 100644 --- a/services/web/server/tests/unit/isolated/test_dynamic_scheduler.py +++ b/services/web/server/tests/unit/isolated/test_dynamic_scheduler.py @@ -47,7 +47,7 @@ def mock_rpc_client( @pytest.fixture def dynamic_service_start() -> DynamicServiceStart: - return DynamicServiceStart.parse_obj( + return DynamicServiceStart.model_validate( DynamicServiceStart.Config.schema_extra["example"] ) @@ -55,9 +55,9 @@ def dynamic_service_start() -> DynamicServiceStart: @pytest.mark.parametrize( "expected_response", [ - NodeGet.parse_obj(NodeGet.Config.schema_extra["example"]), - NodeGetIdle.parse_obj(NodeGetIdle.Config.schema_extra["example"]), - DynamicServiceGet.parse_obj( + NodeGet.model_validate(NodeGet.Config.schema_extra["example"]), + NodeGetIdle.model_validate(NodeGetIdle.Config.schema_extra["example"]), + DynamicServiceGet.model_validate( DynamicServiceGet.Config.schema_extra["examples"][0] ), ], @@ -98,8 +98,8 @@ async def test_get_service_status_raises_rpc_server_error( @pytest.mark.parametrize( "expected_response", [ - NodeGet.parse_obj(NodeGet.Config.schema_extra["example"]), - DynamicServiceGet.parse_obj( + NodeGet.model_validate(NodeGet.Config.schema_extra["example"]), + DynamicServiceGet.model_validate( DynamicServiceGet.Config.schema_extra["examples"][0] ), ], diff --git a/services/web/server/tests/unit/isolated/test_garbage_collector_core.py 
b/services/web/server/tests/unit/isolated/test_garbage_collector_core.py index 7f686a44292..7226a4001f8 100644 --- a/services/web/server/tests/unit/isolated/test_garbage_collector_core.py +++ b/services/web/server/tests/unit/isolated/test_garbage_collector_core.py @@ -122,7 +122,7 @@ async def test_remove_orphaned_services_with_no_running_services_does_nothing( @pytest.fixture def faker_dynamic_service_get() -> Callable[[], DynamicServiceGet]: def _() -> DynamicServiceGet: - return DynamicServiceGet.parse_obj( + return DynamicServiceGet.model_validate( DynamicServiceGet.Config.schema_extra["examples"][1] ) diff --git a/services/web/server/tests/unit/isolated/test_groups_models.py b/services/web/server/tests/unit/isolated/test_groups_models.py index 10b49979b1c..5ad6f8863cc 100644 --- a/services/web/server/tests/unit/isolated/test_groups_models.py +++ b/services/web/server/tests/unit/isolated/test_groups_models.py @@ -14,7 +14,7 @@ def test_models_library_and_postgress_database_enums_are_equivalent(): def test_sanitize_legacy_data(): - users_group_1 = UsersGroup.parse_obj( + users_group_1 = UsersGroup.model_validate( { "gid": "27", "label": "A user", @@ -26,7 +26,7 @@ def test_sanitize_legacy_data(): assert users_group_1.thumbnail is None - users_group_2 = UsersGroup.parse_obj( + users_group_2 = UsersGroup.model_validate( { "gid": "27", "label": "A user", diff --git a/services/web/server/tests/unit/isolated/test_products_model.py b/services/web/server/tests/unit/isolated/test_products_model.py index b3ee823a37e..b78ad59ba5a 100644 --- a/services/web/server/tests/unit/isolated/test_products_model.py +++ b/services/web/server/tests/unit/isolated/test_products_model.py @@ -34,13 +34,13 @@ def test_product_examples( def test_product_to_static(): - product = Product.parse_obj(Product.Config.schema_extra["examples"][0]) + product = Product.model_validate(Product.Config.schema_extra["examples"][0]) assert product.to_statics() == { "displayName": "o²S²PARC", "supportEmail": "support@osparc.io", } - product = Product.parse_obj(Product.Config.schema_extra["examples"][2]) + product = Product.model_validate(Product.Config.schema_extra["examples"][2]) assert product.to_statics() == { "displayName": "o²S²PARC FOO", diff --git a/services/web/server/tests/unit/isolated/test_projects_utils.py b/services/web/server/tests/unit/isolated/test_projects_utils.py index e83e02e295f..0178882d760 100644 --- a/services/web/server/tests/unit/isolated/test_projects_utils.py +++ b/services/web/server/tests/unit/isolated/test_projects_utils.py @@ -58,7 +58,7 @@ def test_clone_project_document( # # SEE https://swagger.io/docs/specification/data-models/data-types/#Null - assert Project.parse_obj(clone) is not None + assert Project.model_validate(clone) is not None @pytest.mark.parametrize( @@ -145,4 +145,4 @@ def test_validate_project_json_schema(): with open(CURRENT_DIR / "data/project-data.json") as f: project: ProjectDict = json.load(f) - Project.parse_obj(project) + Project.model_validate(project) diff --git a/services/web/server/tests/unit/isolated/test_storage_schemas.py b/services/web/server/tests/unit/isolated/test_storage_schemas.py index c11ce1f1345..31ea4260bb4 100644 --- a/services/web/server/tests/unit/isolated/test_storage_schemas.py +++ b/services/web/server/tests/unit/isolated/test_storage_schemas.py @@ -20,4 +20,4 @@ def test_model_examples( model_cls: type[BaseModel], example_name: int, example_data: Any ): print(example_name, ":", json.dumps(example_data)) - assert model_cls.parse_obj(example_data) + 
assert model_cls.model_validate(example_data) diff --git a/services/web/server/tests/unit/isolated/test_studies_dispatcher_core.py b/services/web/server/tests/unit/isolated/test_studies_dispatcher_core.py index 8faada91005..72b754cf225 100644 --- a/services/web/server/tests/unit/isolated/test_studies_dispatcher_core.py +++ b/services/web/server/tests/unit/isolated/test_studies_dispatcher_core.py @@ -50,7 +50,7 @@ async def test_create_project_with_viewer(view: dict[str, Any]): print(json.dumps(project_in, indent=2)) # This operation is done exactly before adding to the database in projects_handlers.create_projects - Project.parse_obj(project_in) + Project.model_validate(project_in) def test_url_quoting_and_validation(): @@ -71,14 +71,14 @@ def unquote_url(cls, v): w = w.replace(SPACE, "%20") return w - M.parse_obj( + M.model_validate( { # encoding %20 as %2520 "url": "https://raw.githubusercontent.com/pcrespov/osparc-sample-studies/master/files%2520samples/sample.ipynb" } ) - obj2 = M.parse_obj( + obj2 = M.model_validate( { # encoding space as %20 "url": "https://raw.githubusercontent.com/pcrespov/osparc-sample-studies/master/files%20samples/sample.ipynb" @@ -86,7 +86,7 @@ def unquote_url(cls, v): ) url_with_url_in_query = "http://127.0.0.1:9081/view?file_type=IPYNB&viewer_key=simcore/services/dynamic/jupyter-octave-python-math&viewer_version=1.6.9&file_size=1&download_link=https://raw.githubusercontent.com/pcrespov/osparc-sample-studies/master/files%2520samples/sample.ipynb" - obj4 = M.parse_obj({"url": URL(url_with_url_in_query).query["download_link"]}) + obj4 = M.model_validate({"url": URL(url_with_url_in_query).query["download_link"]}) assert obj2.url.path == obj4.url.path @@ -94,7 +94,7 @@ def unquote_url(cls, v): "https://raw.githubusercontent.com/pcrespov/osparc-sample-studies/master/files%20samples/sample.ipynb" ) M(url=quoted_url) - M.parse_obj({"url": url_with_url_in_query}) + M.model_validate({"url": url_with_url_in_query}) assert ( URL(url_with_url_in_query).query["download_link"] diff --git a/services/web/server/tests/unit/isolated/test_studies_dispatcher_models.py b/services/web/server/tests/unit/isolated/test_studies_dispatcher_models.py index 0ab58dfd77e..3e14ad9150a 100644 --- a/services/web/server/tests/unit/isolated/test_studies_dispatcher_models.py +++ b/services/web/server/tests/unit/isolated/test_studies_dispatcher_models.py @@ -74,7 +74,7 @@ def file_and_service_params() -> dict[str, Any]: def test_download_link_validators_2(file_and_service_params: dict[str, Any]): - params = ServiceAndFileParams.parse_obj(file_and_service_params) + params = ServiceAndFileParams.model_validate(file_and_service_params) assert params.download_link diff --git a/services/web/server/tests/unit/isolated/test_user_notifications.py b/services/web/server/tests/unit/isolated/test_user_notifications.py index d606a84297f..ff53b0aa4fe 100644 --- a/services/web/server/tests/unit/isolated/test_user_notifications.py +++ b/services/web/server/tests/unit/isolated/test_user_notifications.py @@ -14,7 +14,7 @@ @pytest.mark.parametrize("raw_data", UserNotification.Config.schema_extra["examples"]) def test_user_notification(raw_data: dict[str, Any]): - assert UserNotification.parse_obj(raw_data) + assert UserNotification.model_validate(raw_data) @pytest.mark.parametrize("user_id", [10]) @@ -26,7 +26,7 @@ def test_get_notification_key(user_id: UserID): "request_data", [ pytest.param( - UserNotificationCreate.parse_obj( + UserNotificationCreate.model_validate( { "user_id": "1", "category": 
NotificationCategory.NEW_ORGANIZATION, @@ -40,7 +40,7 @@ def test_get_notification_key(user_id: UserID): id="normal_usage", ), pytest.param( - UserNotificationCreate.parse_obj( + UserNotificationCreate.model_validate( { "user_id": "1", "category": NotificationCategory.NEW_ORGANIZATION, @@ -55,7 +55,7 @@ def test_get_notification_key(user_id: UserID): id="read_is_always_set_false", ), pytest.param( - UserNotificationCreate.parse_obj( + UserNotificationCreate.model_validate( { "id": "some_id", "user_id": "1", @@ -70,7 +70,7 @@ def test_get_notification_key(user_id: UserID): id="a_new_id_is_alway_recreated", ), pytest.param( - UserNotificationCreate.parse_obj( + UserNotificationCreate.model_validate( { "id": "some_id", "user_id": "1", @@ -85,7 +85,7 @@ def test_get_notification_key(user_id: UserID): id="category_from_string", ), pytest.param( - UserNotificationCreate.parse_obj( + UserNotificationCreate.model_validate( { "id": "some_id", "user_id": "1", diff --git a/services/web/server/tests/unit/isolated/test_users_models.py b/services/web/server/tests/unit/isolated/test_users_models.py index ef5ee03c7b0..474cb2f1df9 100644 --- a/services/web/server/tests/unit/isolated/test_users_models.py +++ b/services/web/server/tests/unit/isolated/test_users_models.py @@ -134,5 +134,5 @@ def test_parsing_output_of_get_user_profile(): }, } - profile = ProfileGet.parse_obj(result_from_db_query_and_composition) + profile = ProfileGet.model_validate(result_from_db_query_and_composition) assert "password" not in profile.dict(exclude_unset=True) diff --git a/services/web/server/tests/unit/with_dbs/01/clusters/test_clusters_handlers.py b/services/web/server/tests/unit/with_dbs/01/clusters/test_clusters_handlers.py index c3f6b1d8570..a0cd6542b53 100644 --- a/services/web/server/tests/unit/with_dbs/01/clusters/test_clusters_handlers.py +++ b/services/web/server/tests/unit/with_dbs/01/clusters/test_clusters_handlers.py @@ -45,18 +45,18 @@ def mocked_director_v2_api(mocker: MockerFixture): "simcore_service_webserver.clusters._handlers.director_v2_api", autospec=True ) - mocked_director_v2_api.create_cluster.return_value = Cluster.parse_obj( + mocked_director_v2_api.create_cluster.return_value = Cluster.model_validate( random.choice(Cluster.Config.schema_extra["examples"]) ) mocked_director_v2_api.list_clusters.return_value = [] - mocked_director_v2_api.get_cluster.return_value = Cluster.parse_obj( + mocked_director_v2_api.get_cluster.return_value = Cluster.model_validate( random.choice(Cluster.Config.schema_extra["examples"]) ) mocked_director_v2_api.get_cluster_details.return_value = { "scheduler": {"status": "running"}, "dashboardLink": "https://link.to.dashboard", } - mocked_director_v2_api.update_cluster.return_value = Cluster.parse_obj( + mocked_director_v2_api.update_cluster.return_value = Cluster.model_validate( random.choice(Cluster.Config.schema_extra["examples"]) ) mocked_director_v2_api.delete_cluster.return_value = None @@ -132,7 +132,7 @@ async def test_create_cluster( # we are done here return - created_cluster = Cluster.parse_obj(data) + created_cluster = Cluster.model_validate(data) assert created_cluster diff --git a/services/web/server/tests/unit/with_dbs/01/studies_dispatcher/test_studies_dispatcher_handlers.py b/services/web/server/tests/unit/with_dbs/01/studies_dispatcher/test_studies_dispatcher_handlers.py index 7933270984a..dc76115c45b 100644 --- a/services/web/server/tests/unit/with_dbs/01/studies_dispatcher/test_studies_dispatcher_handlers.py +++ 
b/services/web/server/tests/unit/with_dbs/01/studies_dispatcher/test_studies_dispatcher_handlers.py @@ -241,7 +241,7 @@ def test_model_examples( model_cls: type[BaseModel], example_name: int, example_data: Any ): print(example_name, ":", json_dumps(example_data)) - model = model_cls.parse_obj(example_data) + model = model_cls.model_validate(example_data) assert model diff --git a/services/web/server/tests/unit/with_dbs/01/studies_dispatcher/test_studies_dispatcher_projects.py b/services/web/server/tests/unit/with_dbs/01/studies_dispatcher/test_studies_dispatcher_projects.py index 48aacf56c6c..cd9bc502089 100644 --- a/services/web/server/tests/unit/with_dbs/01/studies_dispatcher/test_studies_dispatcher_projects.py +++ b/services/web/server/tests/unit/with_dbs/01/studies_dispatcher/test_studies_dispatcher_projects.py @@ -106,7 +106,7 @@ async def test_add_new_project_from_model_instance( project_id=project_id, service_id=viewer_id, owner=user, - service_info=ServiceInfo.parse_obj(viewer_info), + service_info=ServiceInfo.model_validate(viewer_info), ) else: project = _create_project_with_filepicker_and_service( diff --git a/services/web/server/tests/unit/with_dbs/02/test_announcements.py b/services/web/server/tests/unit/with_dbs/02/test_announcements.py index cd87e2526c6..19ca7413827 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_announcements.py +++ b/services/web/server/tests/unit/with_dbs/02/test_announcements.py @@ -185,7 +185,7 @@ async def test_list_announcements_filtered( def test_model_examples( model_cls: type[BaseModel], example_name: int, example_data: Any ): - assert model_cls.parse_obj( + assert model_cls.model_validate( example_data ), f"Failed {example_name} : {json.dumps(example_data)}" @@ -193,7 +193,7 @@ def test_model_examples( def test_invalid_announcement(faker: Faker): now = arrow.utcnow() with pytest.raises(ValidationError): - Announcement.parse_obj( + Announcement.model_validate( { "id": "Student_Competition_2023", "products": ["s4llite", "osparc"], @@ -209,7 +209,7 @@ def test_invalid_announcement(faker: Faker): def test_announcement_expired(faker: Faker): now = arrow.utcnow() - model = Announcement.parse_obj( + model = Announcement.model_validate( { "id": "Student_Competition_2023", "products": ["s4llite", "osparc"], diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_cancellations.py b/services/web/server/tests/unit/with_dbs/02/test_projects_cancellations.py index 6c841fa8650..3fe08f2172a 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_cancellations.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_cancellations.py @@ -150,7 +150,7 @@ async def test_copying_large_project_and_retrieving_copy_task( create_url = create_url.with_query(from_study=user_project["uuid"]) resp = await client.post(f"{create_url}", json={}) data, error = await assert_status(resp, expected.accepted) - created_copy_task = TaskGet.parse_obj(data) + created_copy_task = TaskGet.model_validate(data) # list current tasks list_task_url = client.app.router["list_tasks"].url_for() resp = await client.get(f"{list_task_url}") diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__clone.py b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__clone.py index 6ca7392dd4b..28c8db0cfa6 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__clone.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__clone.py @@ -49,7 +49,7 @@ async def 
_request_clone_project(client: TestClient, url: URL) -> ProjectGet: data = await long_running_task.result() assert data is not None - return ProjectGet.parse_obj(data) + return ProjectGet.model_validate(data) @pytest.mark.parametrize(*standard_role_response(), ids=str) diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__clone_in_workspace_and_folder.py b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__clone_in_workspace_and_folder.py index 763976e2504..52b78b7be6c 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__clone_in_workspace_and_folder.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__clone_in_workspace_and_folder.py @@ -80,7 +80,7 @@ async def _request_clone_project(client: TestClient, url: URL) -> ProjectGet: data = await long_running_task.result() assert data is not None - return ProjectGet.parse_obj(data) + return ProjectGet.model_validate(data) @pytest.mark.parametrize( diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handler.py b/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handler.py index 8243228681b..da875127b26 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handler.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handler.py @@ -383,7 +383,7 @@ def num_services( self, *args, **kwargs ) -> list[DynamicServiceGet]: # noqa: ARG002 return [ - DynamicServiceGet.parse_obj( + DynamicServiceGet.model_validate( DynamicServiceGet.Config.schema_extra["examples"][1] | {"service_uuid": service_uuid, "project_id": user_project["uuid"]} ) diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_ports_handlers.py b/services/web/server/tests/unit/with_dbs/02/test_projects_ports_handlers.py index ae1b62e0558..06e20946e91 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_ports_handlers.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_ports_handlers.py @@ -276,7 +276,7 @@ async def test_clone_project_and_set_inputs( data = await long_running_task.result() assert data is not None - cloned_project = ProjectGet.parse_obj(data) + cloned_project = ProjectGet.model_validate(data) assert parent_project_id != cloned_project.uuid assert user_project["description"] == cloned_project.description diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py b/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py index 4f6b3c3cf1f..d3945674158 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py @@ -1040,7 +1040,7 @@ async def test_project_node_lifetime( # noqa: PLR0915 node_sample = deepcopy(NodeGet.Config.schema_extra["example"]) mocked_director_v2_api[ "dynamic_scheduler.api.get_dynamic_service" - ].return_value = NodeGet.parse_obj( + ].return_value = NodeGet.model_validate( { **node_sample, "service_state": "running", @@ -1059,7 +1059,7 @@ async def test_project_node_lifetime( # noqa: PLR0915 ) mocked_director_v2_api[ "dynamic_scheduler.api.get_dynamic_service" - ].return_value = NodeGetIdle.parse_obj( + ].return_value = NodeGetIdle.model_validate( { "service_uuid": node_sample["service_uuid"], "service_state": "idle", diff --git a/services/web/server/tests/unit/with_dbs/03/folders/test_folders.py b/services/web/server/tests/unit/with_dbs/03/folders/test_folders.py index 
16cc4f667fb..92cad152a71 100644 --- a/services/web/server/tests/unit/with_dbs/03/folders/test_folders.py +++ b/services/web/server/tests/unit/with_dbs/03/folders/test_folders.py @@ -54,7 +54,7 @@ async def test_folders_full_workflow( url = client.app.router["create_folder"].url_for() resp = await client.post(url.path, json={"name": "My first folder"}) added_folder, _ = await assert_status(resp, status.HTTP_201_CREATED) - assert FolderGet.parse_obj(added_folder) + assert FolderGet.model_validate(added_folder) # list user folders url = client.app.router["list_folders"].url_for() @@ -74,7 +74,7 @@ async def test_folders_full_workflow( ) resp = await client.get(url) data, _ = await assert_status(resp, status.HTTP_200_OK) - assert FolderGet.parse_obj(data) + assert FolderGet.model_validate(data) assert data["folderId"] == added_folder["folderId"] assert data["name"] == "My first folder" @@ -89,7 +89,7 @@ async def test_folders_full_workflow( }, ) data, _ = await assert_status(resp, status.HTTP_200_OK) - assert FolderGet.parse_obj(data) + assert FolderGet.model_validate(data) # list user folders url = client.app.router["list_folders"].url_for() @@ -189,7 +189,7 @@ async def test_sub_folders_full_workflow( }, ) data, _ = await assert_status(resp, status.HTTP_200_OK) - assert FolderGet.parse_obj(data) + assert FolderGet.model_validate(data) # list user root folders base_url = client.app.router["list_folders"].url_for() diff --git a/services/web/server/tests/unit/with_dbs/03/invitations/conftest.py b/services/web/server/tests/unit/with_dbs/03/invitations/conftest.py index ea792b8f726..d318e17a96f 100644 --- a/services/web/server/tests/unit/with_dbs/03/invitations/conftest.py +++ b/services/web/server/tests/unit/with_dbs/03/invitations/conftest.py @@ -66,7 +66,7 @@ def fake_osparc_invitation( Emulates an invitation for osparc product """ oas = deepcopy(invitations_service_openapi_specs) - content = ApiInvitationContent.parse_obj( + content = ApiInvitationContent.model_validate( oas["components"]["schemas"]["ApiInvitationContent"]["example"] ) content.product = "osparc" @@ -150,7 +150,7 @@ def _generate(url, **kwargs): return CallbackResult( status=status.HTTP_200_OK, payload=jsonable_encoder( - ApiInvitationContentAndLink.parse_obj( + ApiInvitationContentAndLink.model_validate( { **example, **body, diff --git a/services/web/server/tests/unit/with_dbs/03/invitations/test_login_handlers_registration_invitations.py b/services/web/server/tests/unit/with_dbs/03/invitations/test_login_handlers_registration_invitations.py index 7fa3ee144a7..be73ad487c9 100644 --- a/services/web/server/tests/unit/with_dbs/03/invitations/test_login_handlers_registration_invitations.py +++ b/services/web/server/tests/unit/with_dbs/03/invitations/test_login_handlers_registration_invitations.py @@ -52,7 +52,7 @@ async def test_check_registration_invitation_when_not_required( ) data, _ = await assert_status(response, status.HTTP_200_OK) - invitation = InvitationInfo.parse_obj(data) + invitation = InvitationInfo.model_validate(data) assert invitation.email is None @@ -74,7 +74,7 @@ async def test_check_registration_invitations_with_old_code( ) data, _ = await assert_status(response, status.HTTP_200_OK) - invitation = InvitationInfo.parse_obj(data) + invitation = InvitationInfo.model_validate(data) assert invitation.email is None @@ -100,7 +100,7 @@ async def test_check_registration_invitation_and_get_email( ) data, _ = await assert_status(response, status.HTTP_200_OK) - invitation = InvitationInfo.parse_obj(data) + invitation = 
InvitationInfo.model_validate(data) assert invitation.email == fake_osparc_invitation.guest diff --git a/services/web/server/tests/unit/with_dbs/03/invitations/test_products__invitations_handlers.py b/services/web/server/tests/unit/with_dbs/03/invitations/test_products__invitations_handlers.py index 0f8a85544f4..749fddb1548 100644 --- a/services/web/server/tests/unit/with_dbs/03/invitations/test_products__invitations_handlers.py +++ b/services/web/server/tests/unit/with_dbs/03/invitations/test_products__invitations_handlers.py @@ -56,7 +56,7 @@ async def test_role_access_to_generate_invitation( ) data, error = await assert_status(response, expected_status) if not error: - got = InvitationGenerated.parse_obj(data) + got = InvitationGenerated.model_validate(data) assert got.guest == guest_email else: assert error @@ -99,7 +99,7 @@ async def test_product_owner_generates_invitation( data, error = await assert_status(response, expected_status) assert not error - got = InvitationGenerated.parse_obj(data) + got = InvitationGenerated.model_validate(data) expected = { "issuer": logged_user["email"][:_MAX_LEN], **request_model.dict(exclude_none=True), @@ -186,7 +186,7 @@ async def test_pre_registration_and_invitation_workflow( response = await client.post("/v0/invitation:generate", json=invitation) data, _ = await assert_status(response, status.HTTP_200_OK) assert data["guest"] == guest_email - got_invitation = InvitationGenerated.parse_obj(data) + got_invitation = InvitationGenerated.model_validate(data) # register user assert got_invitation.invitation_link.fragment diff --git a/services/web/server/tests/unit/with_dbs/03/login/test_login_registration.py b/services/web/server/tests/unit/with_dbs/03/login/test_login_registration.py index 61ade5ec24b..a3b51fe4d2c 100644 --- a/services/web/server/tests/unit/with_dbs/03/login/test_login_registration.py +++ b/services/web/server/tests/unit/with_dbs/03/login/test_login_registration.py @@ -494,7 +494,7 @@ async def test_registraton_with_invitation_for_trial_account( url = client.app.router["get_my_profile"].url_for() response = await client.get(url.path) data, _ = await assert_status(response, status.HTTP_200_OK) - profile = ProfileGet.parse_obj(data) + profile = ProfileGet.model_validate(data) expected = invitation.user["created_at"] + timedelta(days=TRIAL_DAYS) assert profile.expiration_date diff --git a/services/web/server/tests/unit/with_dbs/03/meta_modeling/test_meta_modeling_iterations.py b/services/web/server/tests/unit/with_dbs/03/meta_modeling/test_meta_modeling_iterations.py index 05bd373b15f..b6fe255b0e6 100644 --- a/services/web/server/tests/unit/with_dbs/03/meta_modeling/test_meta_modeling_iterations.py +++ b/services/web/server/tests/unit/with_dbs/03/meta_modeling/test_meta_modeling_iterations.py @@ -131,7 +131,7 @@ async def test_iterators_workflow( project_id=project_data["uuid"] ) for node_id, node_data in modifications["workbench"].items(): - node = Node.parse_obj(node_data) + node = Node.model_validate(node_data) response = await client.post( f"{create_node_url}", json={ @@ -188,7 +188,7 @@ async def _mock_start(project_id, user_id, product_name, **options): f"/v0/projects/{project_uuid}/checkpoint/{head_ref_id}/iterations?offset=0" ) body = await response.json() - first_iterlist = Page[ProjectIterationItem].parse_obj(body).data + first_iterlist = Page[ProjectIterationItem].model_validate(body).data assert len(first_iterlist) == 3 @@ -232,7 +232,7 @@ async def _mock_catalog_get(*args, **kwarg): assert response.status == 
status.HTTP_200_OK, await response.text() body = await response.json() - assert Page[ProjectIterationResultItem].parse_obj(body).data is not None + assert Page[ProjectIterationResultItem].model_validate(body).data is not None # GET project and MODIFY iterator values---------------------------------------------- # - Change iterations from 0:4 -> HEAD+1 @@ -246,7 +246,7 @@ async def _mock_catalog_get(*args, **kwarg): # Dict keys are usually some sort of identifier, typically a UUID or # and index but nothing prevents a dict from using any other type of key types # - project = Project.parse_obj(body["data"]) + project = Project.model_validate(body["data"]) new_project = project.copy( update={ # TODO: HACK to overcome export from None -> string @@ -291,7 +291,7 @@ async def _mock_catalog_get(*args, **kwarg): ) body = await response.json() assert response.status == status.HTTP_200_OK, f"{body=}" # nosec - second_iterlist = Page[ProjectIterationItem].parse_obj(body).data + second_iterlist = Page[ProjectIterationItem].model_validate(body).data assert len(second_iterlist) == 4 assert len({it.workcopy_project_id for it in second_iterlist}) == len( diff --git a/services/web/server/tests/unit/with_dbs/03/test_email.py b/services/web/server/tests/unit/with_dbs/03/test_email.py index e2164071c16..d6f7a050239 100644 --- a/services/web/server/tests/unit/with_dbs/03/test_email.py +++ b/services/web/server/tests/unit/with_dbs/03/test_email.py @@ -136,9 +136,9 @@ async def test_email_handlers( assert error is None with pytest.raises(ValidationError): - EmailTestFailed.parse_obj(data) + EmailTestFailed.model_validate(data) - passed = EmailTestPassed.parse_obj(data) + passed = EmailTestPassed.model_validate(data) print(passed.json(indent=1)) diff --git a/services/web/server/tests/unit/with_dbs/03/test_users.py b/services/web/server/tests/unit/with_dbs/03/test_users.py index dac6f6696c4..e1f66068421 100644 --- a/services/web/server/tests/unit/with_dbs/03/test_users.py +++ b/services/web/server/tests/unit/with_dbs/03/test_users.py @@ -82,12 +82,12 @@ async def test_get_profile( data, error = await assert_status(resp, expected) # check enveloped - e = Envelope[ProfileGet].parse_obj(await resp.json()) + e = Envelope[ProfileGet].model_validate(await resp.json()) assert e.error == error assert e.data.dict(**RESPONSE_MODEL_POLICY) == data if e.data else e.data == data if not error: - profile = ProfileGet.parse_obj(data) + profile = ProfileGet.model_validate(data) product_group = { "accessRights": {"delete": False, "read": False, "write": False}, diff --git a/services/web/server/tests/unit/with_dbs/03/test_users__notifications.py b/services/web/server/tests/unit/with_dbs/03/test_users__notifications.py index 77aaccade51..70d76a247f5 100644 --- a/services/web/server/tests/unit/with_dbs/03/test_users__notifications.py +++ b/services/web/server/tests/unit/with_dbs/03/test_users__notifications.py @@ -71,7 +71,7 @@ def _create_notification( notification_categories = tuple(NotificationCategory) notification: UserNotification = UserNotification.create_from_request_data( - UserNotificationCreate.parse_obj( + UserNotificationCreate.model_validate( { "user_id": user_id, "category": random.choice(notification_categories), diff --git a/services/web/server/tests/unit/with_dbs/03/version_control/conftest.py b/services/web/server/tests/unit/with_dbs/03/version_control/conftest.py index 64c0052efd6..f73e573a1dd 100644 --- a/services/web/server/tests/unit/with_dbs/03/version_control/conftest.py +++ 
b/services/web/server/tests/unit/with_dbs/03/version_control/conftest.py @@ -207,7 +207,7 @@ async def _go(client: TestClient, project_uuid: UUID) -> None: # add a node node_id = faker.uuid4() - node = Node.parse_obj( + node = Node.model_validate( { "key": f"simcore/services/comp/test_{__name__}", "version": "1.0.0", diff --git a/services/web/server/tests/unit/with_dbs/03/version_control/test_version_control.py b/services/web/server/tests/unit/with_dbs/03/version_control/test_version_control.py index ed04b3728e2..026d0276641 100644 --- a/services/web/server/tests/unit/with_dbs/03/version_control/test_version_control.py +++ b/services/web/server/tests/unit/with_dbs/03/version_control/test_version_control.py @@ -21,7 +21,7 @@ def test_compute_workbench_checksum(fake_project: ProjectDict): # as a dict sha1_w_dict = compute_workbench_checksum(fake_project["workbench"]) - workbench = WorkbenchModel.parse_obj(fake_project["workbench"]) + workbench = WorkbenchModel.model_validate(fake_project["workbench"]) # with pydantic models, i.e. Nodes # diff --git a/services/web/server/tests/unit/with_dbs/03/version_control/test_version_control_handlers.py b/services/web/server/tests/unit/with_dbs/03/version_control/test_version_control_handlers.py index 05ab31ccdf8..078ba287a5e 100644 --- a/services/web/server/tests/unit/with_dbs/03/version_control/test_version_control_handlers.py +++ b/services/web/server/tests/unit/with_dbs/03/version_control/test_version_control_handlers.py @@ -32,7 +32,7 @@ async def assert_resp_page( assert resp.status == status.HTTP_200_OK, f"Got {await resp.text()}" body = await resp.json() - page = expected_page_cls.parse_obj(body) + page = expected_page_cls.model_validate(body) assert page.meta.total == expected_total assert page.meta.count == expected_count return page @@ -42,7 +42,7 @@ async def assert_status_and_body( resp, expected_cls: HTTPStatus, expected_model: type[BaseModel] ) -> BaseModel: data, _ = await assert_status(resp, expected_cls) - return expected_model.parse_obj(data) + return expected_model.model_validate(data) @pytest.mark.acceptance_test() @@ -59,7 +59,7 @@ async def test_workflow( # get existing project resp = await client.get(f"/{VX}/projects/{project_uuid}") data, _ = await assert_status(resp, status.HTTP_200_OK) - project = Project.parse_obj(data) + project = Project.model_validate(data) assert project.uuid == UUID(project_uuid) # @@ -78,7 +78,7 @@ async def test_workflow( data, _ = await assert_status(resp, status.HTTP_201_CREATED) assert data - checkpoint1 = CheckpointApiModel.parse_obj(data) # NOTE: this is NOT API model + checkpoint1 = CheckpointApiModel.model_validate(data) # NOTE: this is NOT API model # # this project now has a repo @@ -87,20 +87,20 @@ async def test_workflow( resp, expected_page_cls=Page[ProjectDict], expected_total=1, expected_count=1 ) - repo = RepoApiModel.parse_obj(page.data[0]) + repo = RepoApiModel.model_validate(page.data[0]) assert repo.project_uuid == UUID(project_uuid) # GET checkpoint with HEAD resp = await client.get(f"/{VX}/repos/projects/{project_uuid}/checkpoints/HEAD") data, _ = await assert_status(resp, status.HTTP_200_OK) - assert CheckpointApiModel.parse_obj(data) == checkpoint1 + assert CheckpointApiModel.model_validate(data) == checkpoint1 # TODO: GET checkpoint with tag with pytest.raises(aiohttp.ClientResponseError) as excinfo: resp = await client.get(f"/{VX}/repos/projects/{project_uuid}/checkpoints/v1") resp.raise_for_status() - assert CheckpointApiModel.parse_obj(data) == checkpoint1 + assert 
CheckpointApiModel.model_validate(data) == checkpoint1 assert excinfo.value.status == status.HTTP_501_NOT_IMPLEMENTED @@ -109,7 +109,7 @@ async def test_workflow( f"/{VX}/repos/projects/{project_uuid}/checkpoints/{checkpoint1.id}" ) assert str(resp.url) == checkpoint1.url - assert CheckpointApiModel.parse_obj(data) == checkpoint1 + assert CheckpointApiModel.model_validate(data) == checkpoint1 # LIST checkpoints resp = await client.get(f"/{VX}/repos/projects/{project_uuid}/checkpoints") @@ -120,7 +120,7 @@ async def test_workflow( expected_count=1, ) - assert CheckpointApiModel.parse_obj(page.data[0]) == checkpoint1 + assert CheckpointApiModel.model_validate(page.data[0]) == checkpoint1 # UPDATE checkpoint annotations resp = await client.patch( @@ -128,7 +128,7 @@ async def test_workflow( json={"message": "updated message", "tag": "Version 1"}, ) data, _ = await assert_status(resp, status.HTTP_200_OK) - checkpoint1_updated = CheckpointApiModel.parse_obj(data) + checkpoint1_updated = CheckpointApiModel.model_validate(data) assert checkpoint1.id == checkpoint1_updated.id assert checkpoint1.checksum == checkpoint1_updated.checksum @@ -154,30 +154,30 @@ async def test_workflow( json={"tag": "v2", "message": "new commit"}, ) data, _ = await assert_status(resp, status.HTTP_201_CREATED) - checkpoint2 = CheckpointApiModel.parse_obj(data) + checkpoint2 = CheckpointApiModel.model_validate(data) assert checkpoint2.tags == ("v2",) # GET checkpoint with HEAD resp = await client.get(f"/{VX}/repos/projects/{project_uuid}/checkpoints/HEAD") data, _ = await assert_status(resp, status.HTTP_200_OK) - assert CheckpointApiModel.parse_obj(data) == checkpoint2 + assert CheckpointApiModel.model_validate(data) == checkpoint2 # CHECKOUT resp = await client.post( f"/{VX}/repos/projects/{project_uuid}/checkpoints/{checkpoint1.id}:checkout" ) data, _ = await assert_status(resp, status.HTTP_200_OK) - assert CheckpointApiModel.parse_obj(data) == checkpoint1_updated + assert CheckpointApiModel.model_validate(data) == checkpoint1_updated # GET checkpoint with HEAD resp = await client.get(f"/{VX}/repos/projects/{project_uuid}/checkpoints/HEAD") data, _ = await assert_status(resp, status.HTTP_200_OK) - assert CheckpointApiModel.parse_obj(data) == checkpoint1_updated + assert CheckpointApiModel.model_validate(data) == checkpoint1_updated # get working copy resp = await client.get(f"/{VX}/projects/{project_uuid}") data, _ = await assert_status(resp, status.HTTP_200_OK) - project_wc = Project.parse_obj(data) + project_wc = Project.model_validate(data) assert project_wc.uuid == UUID(project_uuid) assert project_wc != project @@ -193,7 +193,7 @@ async def test_create_checkpoint_without_changes( data, _ = await assert_status(resp, status.HTTP_201_CREATED) assert data - checkpoint1 = CheckpointApiModel.parse_obj(data) # NOTE: this is NOT API model + checkpoint1 = CheckpointApiModel.model_validate(data) # NOTE: this is NOT API model # CREATE checkpoint WITHOUT changes resp = await client.post( @@ -203,7 +203,7 @@ async def test_create_checkpoint_without_changes( data, _ = await assert_status(resp, status.HTTP_201_CREATED) assert data - checkpoint2 = CheckpointApiModel.parse_obj(data) # NOTE: this is NOT API model + checkpoint2 = CheckpointApiModel.model_validate(data) # NOTE: this is NOT API model assert ( checkpoint1 == checkpoint2 diff --git a/services/web/server/tests/unit/with_dbs/03/wallets/payments/conftest.py b/services/web/server/tests/unit/with_dbs/03/wallets/payments/conftest.py index 2b59b77c3b5..c84023dfe9a 100644 --- 
a/services/web/server/tests/unit/with_dbs/03/wallets/payments/conftest.py +++ b/services/web/server/tests/unit/with_dbs/03/wallets/payments/conftest.py @@ -79,7 +79,7 @@ async def _create(): }, ) data, _ = await assert_status(resp, status.HTTP_201_CREATED) - return WalletGet.parse_obj(data) + return WalletGet.model_validate(data) return _create diff --git a/services/web/server/tests/unit/with_dbs/03/wallets/payments/test_payments.py b/services/web/server/tests/unit/with_dbs/03/wallets/payments/test_payments.py index ed8b2868481..eead1565607 100644 --- a/services/web/server/tests/unit/with_dbs/03/wallets/payments/test_payments.py +++ b/services/web/server/tests/unit/with_dbs/03/wallets/payments/test_payments.py @@ -102,7 +102,7 @@ async def test_one_time_payment_worfklow( data, error = await assert_status(response, expected_status) if not error: - payment = WalletPaymentInitiated.parse_obj(data) + payment = WalletPaymentInitiated.model_validate(data) assert payment.payment_id assert payment.payment_form_url @@ -197,7 +197,7 @@ async def test_multiple_payments( data, error = await assert_status(response, status.HTTP_201_CREATED) assert data assert not error - payment = WalletPaymentInitiated.parse_obj(data) + payment = WalletPaymentInitiated.model_validate(data) if n % 2: transaction = await _ack_creation_of_wallet_payment( @@ -283,7 +283,7 @@ async def test_complete_payment_errors( assert mock_rpc_payments_service_api["init_payment"].called data, _ = await assert_status(response, status.HTTP_201_CREATED) - payment = WalletPaymentInitiated.parse_obj(data) + payment = WalletPaymentInitiated.model_validate(data) # Cannot complete as PENDING with pytest.raises(ValueError): diff --git a/services/web/server/tests/unit/with_dbs/03/wallets/payments/test_payments_methods.py b/services/web/server/tests/unit/with_dbs/03/wallets/payments/test_payments_methods.py index 0980e45caa2..89291d5eb56 100644 --- a/services/web/server/tests/unit/with_dbs/03/wallets/payments/test_payments_methods.py +++ b/services/web/server/tests/unit/with_dbs/03/wallets/payments/test_payments_methods.py @@ -65,7 +65,7 @@ async def test_payment_method_worfklow( ) data, error = await assert_status(response, status.HTTP_202_ACCEPTED) assert error is None - inited = PaymentMethodInitiated.parse_obj(data) + inited = PaymentMethodInitiated.model_validate(data) assert inited.payment_method_id assert inited.payment_method_form_url.query @@ -140,7 +140,7 @@ async def test_init_and_cancel_payment_method( ) data, error = await assert_status(response, status.HTTP_202_ACCEPTED) assert error is None - inited = PaymentMethodInitiated.parse_obj(data) + inited = PaymentMethodInitiated.model_validate(data) # cancel Create response = await client.post( @@ -165,7 +165,7 @@ async def _add_payment_method( ) data, error = await assert_status(response, status.HTTP_202_ACCEPTED) assert error is None - inited = PaymentMethodInitiated.parse_obj(data) + inited = PaymentMethodInitiated.model_validate(data) await _ack_creation_of_wallet_payment_method( client.app, payment_method_id=inited.payment_method_id, @@ -249,7 +249,7 @@ async def test_wallet_autorecharge( ) data, error = await assert_status(response, expected_status) if not error: - updated_auto_recharge = GetWalletAutoRecharge.parse_obj(data) + updated_auto_recharge = GetWalletAutoRecharge.model_validate(data) assert updated_auto_recharge == GetWalletAutoRecharge( payment_method_id=payment_method_id, min_balance_in_credits=settings.PAYMENTS_AUTORECHARGE_MIN_BALANCE_IN_CREDITS, @@ -263,7 +263,7 @@ 
async def test_wallet_autorecharge( f"/v0/wallets/{wallet.wallet_id}/auto-recharge", ) data, _ = await assert_status(response, status.HTTP_200_OK) - assert updated_auto_recharge == GetWalletAutoRecharge.parse_obj(data) + assert updated_auto_recharge == GetWalletAutoRecharge.model_validate(data) # payment-methods.auto_recharge response = await client.get(f"/v0/wallets/{wallet.wallet_id}/payments-methods") @@ -305,7 +305,7 @@ async def test_delete_primary_payment_method_in_autorecharge( }, ) data, _ = await assert_status(response, status.HTTP_200_OK) - auto_recharge = GetWalletAutoRecharge.parse_obj(data) + auto_recharge = GetWalletAutoRecharge.model_validate(data) assert auto_recharge.enabled is True assert auto_recharge.payment_method_id == payment_method_id assert auto_recharge.monthly_limit_in_usd == 123 @@ -321,7 +321,7 @@ async def test_delete_primary_payment_method_in_autorecharge( f"/v0/wallets/{wallet.wallet_id}/auto-recharge", ) data, _ = await assert_status(response, status.HTTP_200_OK) - auto_recharge_after_delete = GetWalletAutoRecharge.parse_obj(data) + auto_recharge_after_delete = GetWalletAutoRecharge.model_validate(data) assert auto_recharge_after_delete.payment_method_id is None assert auto_recharge_after_delete.enabled is False @@ -334,7 +334,7 @@ async def test_delete_primary_payment_method_in_autorecharge( f"/v0/wallets/{wallet.wallet_id}/auto-recharge", ) data, _ = await assert_status(response, status.HTTP_200_OK) - auto_recharge = GetWalletAutoRecharge.parse_obj(data) + auto_recharge = GetWalletAutoRecharge.model_validate(data) assert auto_recharge.payment_method_id == new_payment_method_id assert auto_recharge.enabled is False @@ -398,7 +398,7 @@ async def test_one_time_payment_with_payment_method( ) data, error = await assert_status(response, expected_status) if not error: - payment = WalletPaymentInitiated.parse_obj(data) + payment = WalletPaymentInitiated.model_validate(data) assert mock_rpc_payments_service_api["pay_with_payment_method"].called assert payment.payment_id diff --git a/services/web/server/tests/unit/with_dbs/03/workspaces/test_workspaces.py b/services/web/server/tests/unit/with_dbs/03/workspaces/test_workspaces.py index e2ace9daa6a..9365094d679 100644 --- a/services/web/server/tests/unit/with_dbs/03/workspaces/test_workspaces.py +++ b/services/web/server/tests/unit/with_dbs/03/workspaces/test_workspaces.py @@ -59,7 +59,7 @@ async def test_workspaces_workflow( }, ) added_workspace, _ = await assert_status(resp, status.HTTP_201_CREATED) - assert WorkspaceGet.parse_obj(added_workspace) + assert WorkspaceGet.model_validate(added_workspace) # list user workspaces url = client.app.router["list_workspaces"].url_for() @@ -96,7 +96,7 @@ async def test_workspaces_workflow( }, ) data, _ = await assert_status(resp, status.HTTP_200_OK) - assert WorkspaceGet.parse_obj(data) + assert WorkspaceGet.model_validate(data) # list user workspaces url = client.app.router["list_workspaces"].url_for() From 8c0fd008649ae59d8bd1487fa3ad882b57756cb6 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 1 Oct 2024 16:17:34 +0200 Subject: [PATCH 164/280] change config access --- .../services/director_v2.py | 2 +- .../tests/unit/_with_db/test_api_user.py | 2 +- .../test_api_routers_solvers_jobs.py | 2 +- .../api-server/tests/unit/test__fastapi.py | 4 +- .../tests/unit/test_models_schemas_files.py | 2 +- .../tests/unit/test_models_schemas_solvers.py | 2 +- services/autoscaling/tests/unit/conftest.py | 10 +- .../tests/unit/test_modules_dask.py | 4 +- 
.../tests/unit/test_utils_docker.py | 2 +- .../with_dbs/test_api_rest_services__list.py | 12 +- .../test_api_rest_services_resources.py | 4 +- .../with_dbs/test_services_access_rights.py | 6 +- .../clusters-keeper/tests/unit/conftest.py | 8 +- .../tests/unit/test_core_settings.py | 4 +- .../tests/unit/test_modules_dask.py | 4 +- .../tests/unit/test_utils_clusters.py | 6 +- .../dask-sidecar/tests/unit/test_tasks.py | 3 +- .../models/comp_tasks.py | 10 +- services/director-v2/tests/conftest.py | 4 +- .../tests/integration/02/conftest.py | 4 +- services/director-v2/tests/unit/conftest.py | 4 +- .../tests/unit/test_modules_dask_client.py | 4 +- ...modules_dynamic_sidecar_client_api_thin.py | 4 +- ...es_dynamic_sidecar_docker_compose_specs.py | 2 +- .../test_modules_dynamic_sidecar_scheduler.py | 6 +- .../tests/unit/test_utils_comp_scheduler.py | 2 +- .../tests/unit/with_dbs/conftest.py | 2 +- .../with_dbs/test_api_route_computations.py | 34 +++-- .../test_api_route_dynamic_services.py | 125 +++++++++++++----- .../tests/unit/with_dbs/test_cli.py | 4 +- ...test_modules_dynamic_sidecar_docker_api.py | 4 +- ...es_dynamic_sidecar_docker_service_specs.py | 24 ++-- .../tests/unit/with_dbs/test_utils_dask.py | 2 +- .../unit/api_rpc/test_api_rpc__services.py | 6 +- services/dynamic-sidecar/tests/conftest.py | 4 +- .../tests/unit/test_api_containers.py | 4 +- .../tests/unit/test_api_prometheus_metrics.py | 2 +- .../unit/test_db_payments_methods_repo.py | 4 +- .../test_db_payments_transactions_repo.py | 2 +- .../test_services_auto_recharge_listener.py | 4 +- ...test_api_resource_tracker_pricing_plans.py | 16 ++- ..._api_resource_tracker_pricing_plans_rpc.py | 16 ++- ...age_triggered_by_listening_with_billing.py | 12 +- ...t_process_rabbitmq_message_with_billing.py | 12 +- ...ss_rabbitmq_message_with_billing_cost_0.py | 4 +- .../unit/isolated/test_dynamic_scheduler.py | 14 +- .../isolated/test_garbage_collector_core.py | 2 +- .../unit/isolated/test_products_model.py | 8 +- .../test_projects__nodes_resources.py | 12 +- .../test_studies_dispatcher_settings.py | 2 +- .../unit/isolated/test_user_notifications.py | 4 +- .../tests/unit/isolated/test_users_models.py | 2 +- .../01/clusters/test_clusters_handlers.py | 6 +- .../01/test_catalog_handlers__pricing_plan.py | 2 +- .../01/test_catalog_handlers__services.py | 7 +- ...st_catalog_handlers__services_resources.py | 2 +- .../server/tests/unit/with_dbs/02/conftest.py | 2 +- .../02/test_projects_nodes_handler.py | 26 +++- ...st_projects_nodes_pricing_unit_handlers.py | 2 +- .../02/test_projects_states_handlers.py | 2 +- .../login/test_login_registration_handlers.py | 2 +- .../test_admin_pricing_plans.py | 29 ++-- .../03/resource_usage/test_pricing_plans.py | 4 +- .../tests/unit/with_dbs/03/test_users.py | 4 +- 64 files changed, 363 insertions(+), 167 deletions(-) diff --git a/services/api-server/src/simcore_service_api_server/services/director_v2.py b/services/api-server/src/simcore_service_api_server/services/director_v2.py index d61a74248f9..1eafd21cea9 100644 --- a/services/api-server/src/simcore_service_api_server/services/director_v2.py +++ b/services/api-server/src/simcore_service_api_server/services/director_v2.py @@ -54,7 +54,7 @@ def guess_progress(self) -> PercentageInt: json_schema_extra={ "examples": [ { - **ComputationTask.Config.schema_extra["examples"][0], + **ComputationTask.model_config["json_schema_extra"]["examples"][0], "url": "https://link-to-stop-computation", } ] diff --git a/services/api-server/tests/unit/_with_db/test_api_user.py 
b/services/api-server/tests/unit/_with_db/test_api_user.py index b20c1727be5..0a42177867b 100644 --- a/services/api-server/tests/unit/_with_db/test_api_user.py +++ b/services/api-server/tests/unit/_with_db/test_api_user.py @@ -32,7 +32,7 @@ def mocked_webserver_service_api(app: FastAPI): ) as respx_mock: # NOTE: webserver-api uses the same schema as api-server! # in-memory fake data - me = deepcopy(Profile.Config.schema_extra["example"]) + me = deepcopy(Profile.model_config["json_schema_extra"]["example"]) def _get_me(request): return httpx.Response(status.HTTP_200_OK, json={"data": me}) diff --git a/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs.py b/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs.py index d26c29b0ce5..4adc0e851ff 100644 --- a/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs.py +++ b/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs.py @@ -314,7 +314,7 @@ async def test_run_solver_job( example = next( e - for e in ServiceMetaDataPublished.Config.schema_extra["examples"] + for e in ServiceMetaDataPublished.model_config["json_schema_extra"]["examples"] if "boot-options" in e ) diff --git a/services/api-server/tests/unit/test__fastapi.py b/services/api-server/tests/unit/test__fastapi.py index 6cf2e6f13c9..4eaddee4437 100644 --- a/services/api-server/tests/unit/test__fastapi.py +++ b/services/api-server/tests/unit/test__fastapi.py @@ -122,8 +122,8 @@ def test_fastapi_route_name_parsing(client: TestClient, app: FastAPI, faker: Fak # Ensures ':' is allowed in routes # SEE https://github.com/encode/starlette/pull/1657 - solver_key = Solver.Config.schema_extra["example"]["id"] - version = Solver.Config.schema_extra["example"]["version"] + solver_key = Solver.model_config["json_schema_extra"]["example"]["id"] + version = Solver.model_config["json_schema_extra"]["example"]["version"] job_id = faker.uuid4() # Checks whether parse correctly ":action" suffix diff --git a/services/api-server/tests/unit/test_models_schemas_files.py b/services/api-server/tests/unit/test_models_schemas_files.py index ded5c379b58..e4852afd238 100644 --- a/services/api-server/tests/unit/test_models_schemas_files.py +++ b/services/api-server/tests/unit/test_models_schemas_files.py @@ -81,7 +81,7 @@ async def test_create_filemetadata_from_starlette_uploadfile( def test_convert_between_file_models(): storage_file_meta = StorageFileMetaData( - **StorageFileMetaData.Config.schema_extra["examples"][1] + **StorageFileMetaData.model_config["json_schema_extra"]["examples"][1] ) storage_file_meta.file_id = parse_obj_as( StorageFileID, f"api/{uuid4()}/extensionless" diff --git a/services/api-server/tests/unit/test_models_schemas_solvers.py b/services/api-server/tests/unit/test_models_schemas_solvers.py index a8dd693622a..dbdd148be0b 100644 --- a/services/api-server/tests/unit/test_models_schemas_solvers.py +++ b/services/api-server/tests/unit/test_models_schemas_solvers.py @@ -12,7 +12,7 @@ def test_solvers_sorting_by_name_and_version(faker: Faker): # SEE https://packaging.pypa.io/en/latest/version.html # have a solver - one_solver = Solver(**Solver.Config.schema_extra["example"]) + one_solver = Solver(**Solver.model_config["json_schema_extra"]["example"]) assert isinstance(one_solver.pep404_version, Version) major, minor, micro = one_solver.pep404_version.release diff --git a/services/autoscaling/tests/unit/conftest.py b/services/autoscaling/tests/unit/conftest.py index 511434df88b..4f41a173f72 100644 --- 
a/services/autoscaling/tests/unit/conftest.py +++ b/services/autoscaling/tests/unit/conftest.py @@ -149,7 +149,7 @@ def with_labelize_drain_nodes( @pytest.fixture(scope="session") def fake_ssm_settings() -> SSMSettings: - return SSMSettings(**SSMSettings.Config.schema_extra["examples"][0]) + return SSMSettings(**SSMSettings.model_config["json_schema_extra"]["examples"][0]) @pytest.fixture @@ -212,7 +212,9 @@ def app_environment( "EC2_INSTANCES_ALLOWED_TYPES": json.dumps( { ec2_type_name: random.choice( # noqa: S311 - EC2InstanceBootSpecific.Config.schema_extra["examples"] + EC2InstanceBootSpecific.model_config["json_schema_extra"][ + "examples" + ] ) for ec2_type_name in aws_allowed_ec2_instance_type_names } @@ -243,7 +245,9 @@ def mocked_ec2_instances_envs( "EC2_INSTANCES_ALLOWED_TYPES": json.dumps( { ec2_type_name: random.choice( # noqa: S311 - EC2InstanceBootSpecific.Config.schema_extra["examples"] + EC2InstanceBootSpecific.model_config["json_schema_extra"][ + "examples" + ] ) | {"ami_id": aws_ami_id} for ec2_type_name in aws_allowed_ec2_instance_type_names diff --git a/services/autoscaling/tests/unit/test_modules_dask.py b/services/autoscaling/tests/unit/test_modules_dask.py index 76dab6883e0..87f4ff6d175 100644 --- a/services/autoscaling/tests/unit/test_modules_dask.py +++ b/services/autoscaling/tests/unit/test_modules_dask.py @@ -42,7 +42,9 @@ _authentication_types = [ NoAuthentication(), - TLSAuthentication.construct(**TLSAuthentication.Config.schema_extra["examples"][0]), + TLSAuthentication.construct( + **TLSAuthentication.model_config["json_schema_extra"]["examples"][0] + ), ] diff --git a/services/autoscaling/tests/unit/test_utils_docker.py b/services/autoscaling/tests/unit/test_utils_docker.py index 03985cfba78..77fa9192461 100644 --- a/services/autoscaling/tests/unit/test_utils_docker.py +++ b/services/autoscaling/tests/unit/test_utils_docker.py @@ -868,7 +868,7 @@ async def test_get_docker_swarm_join_script_returning_unexpected_command_raises( def test_get_docker_login_on_start_bash_command(): registry_settings = RegistrySettings( - **RegistrySettings.Config.schema_extra["examples"][0] + **RegistrySettings.model_config["json_schema_extra"]["examples"][0] ) returned_command = get_docker_login_on_start_bash_command(registry_settings) assert ( diff --git a/services/catalog/tests/unit/with_dbs/test_api_rest_services__list.py b/services/catalog/tests/unit/with_dbs/test_api_rest_services__list.py index 4b0bd5dceb6..fef7d7249a3 100644 --- a/services/catalog/tests/unit/with_dbs/test_api_rest_services__list.py +++ b/services/catalog/tests/unit/with_dbs/test_api_rest_services__list.py @@ -56,9 +56,9 @@ async def test_list_services_with_details( url = URL("/v0/services").with_query({"user_id": user_id, "details": "true"}) # now fake the director such that it returns half the services - fake_registry_service_data = ServiceMetaDataPublished.Config.schema_extra[ - "examples" - ][0] + fake_registry_service_data = ServiceMetaDataPublished.model_config[ + "json_schema_extra" + ]["examples"][0] mocked_director_service_api_base.get("/services", name="list_services").respond( 200, @@ -262,9 +262,9 @@ async def test_list_services_that_are_deprecated( assert received_service.deprecated == deprecation_date # for details, the director must return the same service - fake_registry_service_data = ServiceMetaDataPublished.Config.schema_extra[ - "examples" - ][0] + fake_registry_service_data = ServiceMetaDataPublished.model_config[ + "json_schema_extra" + ]["examples"][0] 
mocked_director_service_api_base.get("/services", name="list_services").respond( 200, json={ diff --git a/services/catalog/tests/unit/with_dbs/test_api_rest_services_resources.py b/services/catalog/tests/unit/with_dbs/test_api_rest_services_resources.py index 1ea7e40f18f..3ba395d24d4 100644 --- a/services/catalog/tests/unit/with_dbs/test_api_rest_services_resources.py +++ b/services/catalog/tests/unit/with_dbs/test_api_rest_services_resources.py @@ -243,7 +243,9 @@ def factory(services_labels: dict[str, dict[str, Any]]) -> None: }, parse_obj_as( ServiceResourcesDict, - ServiceResourcesDictHelpers.Config.schema_extra["examples"][1], + ServiceResourcesDictHelpers.model_config["json_schema_extra"][ + "examples" + ][1], ), "simcore/services/dynamic/sim4life-dy", "3.0.0", diff --git a/services/catalog/tests/unit/with_dbs/test_services_access_rights.py b/services/catalog/tests/unit/with_dbs/test_services_access_rights.py index 4affcb83c58..7da4f06edef 100644 --- a/services/catalog/tests/unit/with_dbs/test_services_access_rights.py +++ b/services/catalog/tests/unit/with_dbs/test_services_access_rights.py @@ -98,7 +98,7 @@ async def test_auto_upgrade_policy( return_value=False, ) # Avoids creating a users + user_to_group table - data = GroupAtDB.Config.schema_extra["example"] + data = GroupAtDB.model_config["json_schema_extra"]["example"] data["gid"] = everyone_gid mocker.patch( "simcore_service_catalog.services.access_rights.GroupsRepository.get_everyone_group", @@ -112,7 +112,9 @@ async def test_auto_upgrade_policy( # SETUP --- MOST_UPDATED_EXAMPLE = -1 new_service_metadata = ServiceMetaDataPublished.model_validate( - ServiceMetaDataPublished.Config.schema_extra["examples"][MOST_UPDATED_EXAMPLE] + ServiceMetaDataPublished.model_config["json_schema_extra"]["examples"][ + MOST_UPDATED_EXAMPLE + ] ) new_service_metadata.version = parse_obj_as(ServiceVersion, "1.0.11") diff --git a/services/clusters-keeper/tests/unit/conftest.py b/services/clusters-keeper/tests/unit/conftest.py index a8f4913d4bb..8d1ccde8f57 100644 --- a/services/clusters-keeper/tests/unit/conftest.py +++ b/services/clusters-keeper/tests/unit/conftest.py @@ -119,7 +119,9 @@ def app_environment( { random.choice( # noqa: S311 ec2_instances - ): EC2InstanceBootSpecific.Config.schema_extra["examples"][ + ): EC2InstanceBootSpecific.model_config["json_schema_extra"][ + "examples" + ][ 1 ] # NOTE: we use example with custom script } @@ -137,7 +139,9 @@ def app_environment( "WORKERS_EC2_INSTANCES_ALLOWED_TYPES": json.dumps( { ec2_type_name: random.choice( # noqa: S311 - EC2InstanceBootSpecific.Config.schema_extra["examples"] + EC2InstanceBootSpecific.model_config["json_schema_extra"][ + "examples" + ] ) for ec2_type_name in ec2_instances } diff --git a/services/clusters-keeper/tests/unit/test_core_settings.py b/services/clusters-keeper/tests/unit/test_core_settings.py index 0e467dc1e67..d734bf32cff 100644 --- a/services/clusters-keeper/tests/unit/test_core_settings.py +++ b/services/clusters-keeper/tests/unit/test_core_settings.py @@ -45,7 +45,9 @@ def test_multiple_primary_ec2_instances_raises( "PRIMARY_EC2_INSTANCES_ALLOWED_TYPES": json.dumps( { ec2_type_name: random.choice( # noqa: S311 - EC2InstanceBootSpecific.Config.schema_extra["examples"] + EC2InstanceBootSpecific.model_config["json_schema_extra"][ + "examples" + ] ) for ec2_type_name in ec2_instances } diff --git a/services/clusters-keeper/tests/unit/test_modules_dask.py b/services/clusters-keeper/tests/unit/test_modules_dask.py index db1833ffd91..c43615e0409 100644 --- 
a/services/clusters-keeper/tests/unit/test_modules_dask.py +++ b/services/clusters-keeper/tests/unit/test_modules_dask.py @@ -24,7 +24,9 @@ _authentication_types = [ NoAuthentication(), - TLSAuthentication.construct(**TLSAuthentication.Config.schema_extra["examples"][0]), + TLSAuthentication.construct( + **TLSAuthentication.model_config["json_schema_extra"]["examples"][0] + ), ] diff --git a/services/clusters-keeper/tests/unit/test_utils_clusters.py b/services/clusters-keeper/tests/unit/test_utils_clusters.py index a6592ed1fa4..af4b0f428b8 100644 --- a/services/clusters-keeper/tests/unit/test_utils_clusters.py +++ b/services/clusters-keeper/tests/unit/test_utils_clusters.py @@ -60,7 +60,7 @@ def app_environment( monkeypatch, { "CLUSTERS_KEEPER_COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH": json_dumps( - TLSAuthentication.Config.schema_extra["examples"][0] + TLSAuthentication.model_config["json_schema_extra"]["examples"][0] ) }, ) @@ -240,7 +240,9 @@ def test_startup_script_defines_all_envs_for_docker_compose( "authentication", [ NoAuthentication(), - TLSAuthentication(**TLSAuthentication.Config.schema_extra["examples"][0]), + TLSAuthentication( + **TLSAuthentication.model_config["json_schema_extra"]["examples"][0] + ), ], ) def test_create_cluster_from_ec2_instance( diff --git a/services/dask-sidecar/tests/unit/test_tasks.py b/services/dask-sidecar/tests/unit/test_tasks.py index 88eaf23c15b..9abc3e84e6a 100644 --- a/services/dask-sidecar/tests/unit/test_tasks.py +++ b/services/dask-sidecar/tests/unit/test_tasks.py @@ -438,7 +438,8 @@ def mocked_get_image_labels( integration_version: version.Version, mocker: MockerFixture ) -> mock.Mock: labels: ImageLabels = parse_obj_as( - ImageLabels, ServiceMetaDataPublished.Config.schema_extra["examples"][0] + ImageLabels, + ServiceMetaDataPublished.model_config["json_schema_extra"]["examples"][0], ) labels.integration_version = f"{integration_version}" return mocker.patch( diff --git a/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py b/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py index 5895411b2d0..f1076b7b83b 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py +++ b/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py @@ -77,7 +77,9 @@ class Config: "tag": "1.3.1", "node_requirements": node_req_example, } - for node_req_example in NodeRequirements.Config.schema_extra["examples"] + for node_req_example in NodeRequirements.model_config[ + "json_schema_extra" + ]["examples"] ] + # old version @@ -235,8 +237,10 @@ class Config: "pricing_unit_id": 1, "pricing_unit_cost_id": 1, }, - "hardware_info": HardwareInfo.Config.schema_extra["examples"][0], + "hardware_info": HardwareInfo.model_config["json_schema_extra"][ + "examples" + ][0], } - for image_example in Image.Config.schema_extra["examples"] + for image_example in Image.model_config["json_schema_extra"]["examples"] ] } diff --git a/services/director-v2/tests/conftest.py b/services/director-v2/tests/conftest.py index b69de758685..888bfe69a5d 100644 --- a/services/director-v2/tests/conftest.py +++ b/services/director-v2/tests/conftest.py @@ -334,7 +334,9 @@ async def wrapper(*args, **kwargs): @pytest.fixture def mock_osparc_variables_api_auth_rpc(mocker: MockerFixture) -> None: - fake_data = ApiKeyGet.model_validate(ApiKeyGet.Config.schema_extra["examples"][0]) + fake_data = ApiKeyGet.model_validate( + ApiKeyGet.model_config["json_schema_extra"]["examples"][0] + ) async def _create( app: FastAPI, 
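
The hunks above and below all apply the same Pydantic v2 access pattern: example payloads that used to live under a nested Model.Config.schema_extra are read from Model.model_config["json_schema_extra"], and parse_obj calls become model_validate. A minimal sketch of the new-style access follows; FakeService is a made-up model used only for illustration, not a model from this repository.

from pydantic import BaseModel, ConfigDict


class FakeService(BaseModel):
    # Hypothetical model, for illustration only; not part of this repository.
    name: str
    version: str

    model_config = ConfigDict(
        json_schema_extra={
            "examples": [{"name": "sleeper", "version": "1.0.0"}],
        }
    )


# Pydantic v1 style being removed by this patch series:
#   example = FakeService.Config.schema_extra["examples"][0]
#   service = FakeService.parse_obj(example)
# Pydantic v2 style that the hunks migrate to:
example = FakeService.model_config["json_schema_extra"]["examples"][0]
service = FakeService.model_validate(example)
assert service.version == "1.0.0"

Because model_config is a plain ConfigDict (a TypedDict) rather than a nested Config class, the stored examples are reached with ordinary dict indexing, which is why the hunks replace attribute access with subscript access.
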
diff --git a/services/director-v2/tests/integration/02/conftest.py b/services/director-v2/tests/integration/02/conftest.py index ed80fc0fce7..ce8e0e1cf5f 100644 --- a/services/director-v2/tests/integration/02/conftest.py +++ b/services/director-v2/tests/integration/02/conftest.py @@ -74,7 +74,7 @@ def mock_projects_networks_repository(mocker: MockerFixture) -> None: def service_resources() -> ServiceResourcesDict: return parse_obj_as( ServiceResourcesDict, - ServiceResourcesDictHelpers.Config.schema_extra["examples"][0], + ServiceResourcesDictHelpers.model_config["json_schema_extra"]["examples"][0], ) @@ -82,7 +82,7 @@ def service_resources() -> ServiceResourcesDict: def mock_resource_usage_tracker(mocker: MockerFixture) -> None: base_module = "simcore_service_director_v2.modules.resource_usage_tracker_client" service_pricing_plan = PricingPlanGet.model_validate( - PricingPlanGet.Config.schema_extra["examples"][1] + PricingPlanGet.model_config["json_schema_extra"]["examples"][1] ) for unit in service_pricing_plan.pricing_units: unit.specific_info.aws_ec2_instances.clear() diff --git a/services/director-v2/tests/unit/conftest.py b/services/director-v2/tests/unit/conftest.py index 4dc385388cf..0d52604fa88 100644 --- a/services/director-v2/tests/unit/conftest.py +++ b/services/director-v2/tests/unit/conftest.py @@ -57,7 +57,7 @@ def simcore_services_network_name() -> str: @pytest.fixture def simcore_service_labels() -> SimcoreServiceLabels: simcore_service_labels = SimcoreServiceLabels.model_validate( - SimcoreServiceLabels.Config.schema_extra["examples"][1] + SimcoreServiceLabels.model_config["json_schema_extra"]["examples"][1] ) simcore_service_labels.callbacks_mapping = parse_obj_as(CallbacksMapping, {}) return simcore_service_labels @@ -66,7 +66,7 @@ def simcore_service_labels() -> SimcoreServiceLabels: @pytest.fixture def dynamic_service_create() -> DynamicServiceCreate: return DynamicServiceCreate.model_validate( - DynamicServiceCreate.Config.schema_extra["example"] + DynamicServiceCreate.model_config["json_schema_extra"]["example"] ) diff --git a/services/director-v2/tests/unit/test_modules_dask_client.py b/services/director-v2/tests/unit/test_modules_dask_client.py index 16ebc158313..23910f02fcc 100644 --- a/services/director-v2/tests/unit/test_modules_dask_client.py +++ b/services/director-v2/tests/unit/test_modules_dask_client.py @@ -487,7 +487,9 @@ def task_labels(comp_run_metadata: RunMetadataDict) -> ContainerLabelsDict: @pytest.fixture def hardware_info() -> HardwareInfo: - return HardwareInfo.model_validate(HardwareInfo.Config.schema_extra["examples"][0]) + return HardwareInfo.model_validate( + HardwareInfo.model_config["json_schema_extra"]["examples"][0] + ) @pytest.fixture diff --git a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_thin.py b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_thin.py index bd75682dec2..e4d04b46ea8 100644 --- a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_thin.py +++ b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_thin.py @@ -284,7 +284,9 @@ async def test_put_volumes( { "metrics_params": parse_obj_as( CreateServiceMetricsAdditionalParams, - CreateServiceMetricsAdditionalParams.Config.schema_extra["example"], + CreateServiceMetricsAdditionalParams.model_config[ + "json_schema_extra" + ]["example"], ) }, id="post_containers_tasks", diff --git a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_docker_compose_specs.py 
b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_docker_compose_specs.py index 8b390e7b973..d06444cf155 100644 --- a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_docker_compose_specs.py +++ b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_docker_compose_specs.py @@ -156,7 +156,7 @@ async def test_inject_resource_limits_and_reservations( [ pytest.param( json.loads( - SimcoreServiceLabels.Config.schema_extra["examples"][2][ + SimcoreServiceLabels.model_config["json_schema_extra"]["examples"][2][ "simcore.service.compose-spec" ] ), diff --git a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_scheduler.py b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_scheduler.py index 13d617ed82d..13763953d07 100644 --- a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_scheduler.py +++ b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_scheduler.py @@ -162,7 +162,11 @@ def mocked_director_v0( ), name="service labels", ).respond( - json={"data": SimcoreServiceLabels.Config.schema_extra["examples"][0]} + json={ + "data": SimcoreServiceLabels.model_config["json_schema_extra"][ + "examples" + ][0] + } ) yield mock diff --git a/services/director-v2/tests/unit/test_utils_comp_scheduler.py b/services/director-v2/tests/unit/test_utils_comp_scheduler.py index fd1ecc5022c..dfb7c0326b1 100644 --- a/services/director-v2/tests/unit/test_utils_comp_scheduler.py +++ b/services/director-v2/tests/unit/test_utils_comp_scheduler.py @@ -79,7 +79,7 @@ def test_get_resource_tracking_run_id( "task", [ CompTaskAtDB.model_validate(example) - for example in CompTaskAtDB.Config.schema_extra["examples"] + for example in CompTaskAtDB.model_config["json_schema_extra"]["examples"] ], ids=str, ) diff --git a/services/director-v2/tests/unit/with_dbs/conftest.py b/services/director-v2/tests/unit/with_dbs/conftest.py index 4094ce65b5d..cf0cf3b99d2 100644 --- a/services/director-v2/tests/unit/with_dbs/conftest.py +++ b/services/director-v2/tests/unit/with_dbs/conftest.py @@ -192,7 +192,7 @@ def cluster( created_cluster_ids: list[str] = [] def creator(user: dict[str, Any], **cluster_kwargs) -> Cluster: - cluster_config = Cluster.Config.schema_extra["examples"][1] + cluster_config = Cluster.model_config["json_schema_extra"]["examples"][1] cluster_config["owner"] = user["primary_gid"] cluster_config.update(**cluster_kwargs) new_cluster = Cluster.model_validate(cluster_config) diff --git a/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py b/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py index d64d3212ccf..46996ace7d3 100644 --- a/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py +++ b/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py @@ -108,7 +108,7 @@ def fake_service_details(mocks_dir: Path) -> ServiceMetaDataPublished: @pytest.fixture def fake_service_extras() -> ServiceExtras: - extra_example = ServiceExtras.Config.schema_extra["examples"][2] + extra_example = ServiceExtras.model_config["json_schema_extra"]["examples"][2] random_extras = ServiceExtras(**extra_example) assert random_extras is not None return random_extras @@ -118,13 +118,15 @@ def fake_service_extras() -> ServiceExtras: def fake_service_resources() -> ServiceResourcesDict: return parse_obj_as( ServiceResourcesDict, - ServiceResourcesDictHelpers.Config.schema_extra["examples"][0], + ServiceResourcesDictHelpers.model_config["json_schema_extra"]["examples"][0], ) @pytest.fixture def 
fake_service_labels() -> dict[str, Any]: - return choice(SimcoreServiceLabels.Config.schema_extra["examples"]) # noqa: S311 + return choice( + SimcoreServiceLabels.model_config["json_schema_extra"]["examples"] + ) # noqa: S311 @pytest.fixture @@ -256,7 +258,7 @@ def _mocked_services_details( @pytest.fixture( - params=PricingPlanGet.Config.schema_extra["examples"], + params=PricingPlanGet.model_config["json_schema_extra"]["examples"], ids=["with ec2 restriction", "without"], ) def default_pricing_plan(request: pytest.FixtureRequest) -> PricingPlanGet: @@ -300,7 +302,7 @@ def _mocked_get_pricing_unit(request, pricing_plan_id: int) -> httpx.Response: ( default_pricing_plan.pricing_units[0] if default_pricing_plan.pricing_units - else PricingUnitGet.Config.schema_extra["examples"][0] + else PricingUnitGet.model_config["json_schema_extra"]["examples"][0] ), by_alias=True, ), @@ -460,7 +462,9 @@ def mocked_clusters_keeper_service_get_instance_type_details_with_invalid_name( ) -@pytest.fixture(params=ServiceResourcesDictHelpers.Config.schema_extra["examples"]) +@pytest.fixture( + params=ServiceResourcesDictHelpers.model_config["json_schema_extra"]["examples"] +) def project_nodes_overrides(request: pytest.FixtureRequest) -> dict[str, Any]: return request.param @@ -569,7 +573,11 @@ async def test_create_computation_with_wallet( @pytest.mark.parametrize( "default_pricing_plan", - [PricingPlanGet.construct(**PricingPlanGet.Config.schema_extra["examples"][0])], + [ + PricingPlanGet.construct( + **PricingPlanGet.model_config["json_schema_extra"]["examples"][0] + ) + ], ) async def test_create_computation_with_wallet_with_invalid_pricing_unit_name_raises_422( minimal_configuration: None, @@ -607,7 +615,11 @@ async def test_create_computation_with_wallet_with_invalid_pricing_unit_name_rai @pytest.mark.parametrize( "default_pricing_plan", - [PricingPlanGet.construct(**PricingPlanGet.Config.schema_extra["examples"][0])], + [ + PricingPlanGet.construct( + **PricingPlanGet.model_config["json_schema_extra"]["examples"][0] + ) + ], ) async def test_create_computation_with_wallet_with_no_clusters_keeper_raises_503( minimal_configuration: None, @@ -708,9 +720,9 @@ async def test_start_computation_with_project_node_resources_defined( proj = await project( user, project_nodes_overrides={ - "required_resources": ServiceResourcesDictHelpers.Config.schema_extra[ - "examples" - ][0] + "required_resources": ServiceResourcesDictHelpers.model_config[ + "json_schema_extra" + ]["examples"][0] }, workbench=fake_workbench_without_outputs, ) diff --git a/services/director-v2/tests/unit/with_dbs/test_api_route_dynamic_services.py b/services/director-v2/tests/unit/with_dbs/test_api_route_dynamic_services.py index c726c6d2f9c..699a4847efe 100644 --- a/services/director-v2/tests/unit/with_dbs/test_api_route_dynamic_services.py +++ b/services/director-v2/tests/unit/with_dbs/test_api_route_dynamic_services.py @@ -161,11 +161,17 @@ async def mock_retrieve_features( ) as respx_mock: if is_legacy: service_details = RunningDynamicServiceDetails.model_validate( - RunningDynamicServiceDetails.Config.schema_extra["examples"][0] + RunningDynamicServiceDetails.model_config["json_schema_extra"][ + "examples" + ][0] ) respx_mock.post( f"{service_details.legacy_service_url}/retrieve", name="retrieve" - ).respond(json=RetrieveDataOutEnveloped.Config.schema_extra["examples"][0]) + ).respond( + json=RetrieveDataOutEnveloped.model_config["json_schema_extra"][ + "examples" + ][0] + ) yield respx_mock # no cleanup required @@ -228,7 +234,9 @@ def 
mocked_director_v0_service_api( name="running interactive service", ).respond( json={ - "data": RunningDynamicServiceDetails.Config.schema_extra["examples"][0] + "data": RunningDynamicServiceDetails.model_config["json_schema_extra"][ + "examples" + ][0] } ) @@ -245,7 +253,9 @@ def get_stack_status(node_uuid: NodeID) -> RunningDynamicServiceDetails: raise DynamicSidecarNotFoundError(node_uuid) return RunningDynamicServiceDetails.model_validate( - RunningDynamicServiceDetails.Config.schema_extra["examples"][0] + RunningDynamicServiceDetails.model_config["json_schema_extra"]["examples"][ + 0 + ] ) module_base = "simcore_service_director_v2.modules.dynamic_sidecar.scheduler" @@ -277,8 +287,12 @@ def remove_service(node_uuid: NodeID, *ars: Any, **kwargs: Any) -> None: [ pytest.param( *ServiceParams( - service=DynamicServiceCreate.Config.schema_extra["example"], - service_labels=SimcoreServiceLabels.Config.schema_extra["examples"][0], + service=DynamicServiceCreate.model_config["json_schema_extra"][ + "example" + ], + service_labels=SimcoreServiceLabels.model_config["json_schema_extra"][ + "examples" + ][0], exp_status_code=status.HTTP_307_TEMPORARY_REDIRECT, is_legacy=True, ), @@ -286,8 +300,12 @@ def remove_service(node_uuid: NodeID, *ars: Any, **kwargs: Any) -> None: ), pytest.param( *ServiceParams( - service=DynamicServiceCreate.Config.schema_extra["example"], - service_labels=SimcoreServiceLabels.Config.schema_extra["examples"][1], + service=DynamicServiceCreate.model_config["json_schema_extra"][ + "example" + ], + service_labels=SimcoreServiceLabels.model_config["json_schema_extra"][ + "examples" + ][1], exp_status_code=status.HTTP_201_CREATED, is_legacy=False, ), @@ -295,8 +313,12 @@ def remove_service(node_uuid: NodeID, *ars: Any, **kwargs: Any) -> None: ), pytest.param( *ServiceParams( - service=DynamicServiceCreate.Config.schema_extra["example"], - service_labels=SimcoreServiceLabels.Config.schema_extra["examples"][2], + service=DynamicServiceCreate.model_config["json_schema_extra"][ + "example" + ], + service_labels=SimcoreServiceLabels.model_config["json_schema_extra"][ + "examples" + ][2], exp_status_code=status.HTTP_201_CREATED, is_legacy=False, ), @@ -349,8 +371,12 @@ def test_create_dynamic_services( [ pytest.param( *ServiceParams( - service=DynamicServiceCreate.Config.schema_extra["example"], - service_labels=SimcoreServiceLabels.Config.schema_extra["examples"][0], + service=DynamicServiceCreate.model_config["json_schema_extra"][ + "example" + ], + service_labels=SimcoreServiceLabels.model_config["json_schema_extra"][ + "examples" + ][0], exp_status_code=status.HTTP_307_TEMPORARY_REDIRECT, is_legacy=True, ), @@ -358,8 +384,12 @@ def test_create_dynamic_services( ), pytest.param( *ServiceParams( - service=DynamicServiceCreate.Config.schema_extra["example"], - service_labels=SimcoreServiceLabels.Config.schema_extra["examples"][1], + service=DynamicServiceCreate.model_config["json_schema_extra"][ + "example" + ], + service_labels=SimcoreServiceLabels.model_config["json_schema_extra"][ + "examples" + ][1], exp_status_code=status.HTTP_200_OK, is_legacy=False, ), @@ -367,8 +397,12 @@ def test_create_dynamic_services( ), pytest.param( *ServiceParams( - service=DynamicServiceCreate.Config.schema_extra["example"], - service_labels=SimcoreServiceLabels.Config.schema_extra["examples"][2], + service=DynamicServiceCreate.model_config["json_schema_extra"][ + "example" + ], + service_labels=SimcoreServiceLabels.model_config["json_schema_extra"][ + "examples" + ][2], 
exp_status_code=status.HTTP_200_OK, is_legacy=False, ), @@ -407,8 +441,12 @@ def test_get_service_status( [ pytest.param( *ServiceParams( - service=DynamicServiceCreate.Config.schema_extra["example"], - service_labels=SimcoreServiceLabels.Config.schema_extra["examples"][0], + service=DynamicServiceCreate.model_config["json_schema_extra"][ + "example" + ], + service_labels=SimcoreServiceLabels.model_config["json_schema_extra"][ + "examples" + ][0], exp_status_code=status.HTTP_307_TEMPORARY_REDIRECT, is_legacy=True, ), @@ -416,8 +454,12 @@ def test_get_service_status( ), pytest.param( *ServiceParams( - service=DynamicServiceCreate.Config.schema_extra["example"], - service_labels=SimcoreServiceLabels.Config.schema_extra["examples"][1], + service=DynamicServiceCreate.model_config["json_schema_extra"][ + "example" + ], + service_labels=SimcoreServiceLabels.model_config["json_schema_extra"][ + "examples" + ][1], exp_status_code=status.HTTP_204_NO_CONTENT, is_legacy=False, ), @@ -425,8 +467,12 @@ def test_get_service_status( ), pytest.param( *ServiceParams( - service=DynamicServiceCreate.Config.schema_extra["example"], - service_labels=SimcoreServiceLabels.Config.schema_extra["examples"][2], + service=DynamicServiceCreate.model_config["json_schema_extra"][ + "example" + ], + service_labels=SimcoreServiceLabels.model_config["json_schema_extra"][ + "examples" + ][2], exp_status_code=status.HTTP_204_NO_CONTENT, is_legacy=False, ), @@ -479,8 +525,12 @@ def dynamic_sidecar_scheduler(minimal_app: FastAPI) -> DynamicSidecarsScheduler: [ pytest.param( *ServiceParams( - service=DynamicServiceCreate.Config.schema_extra["example"], - service_labels=SimcoreServiceLabels.Config.schema_extra["examples"][1], + service=DynamicServiceCreate.model_config["json_schema_extra"][ + "example" + ], + service_labels=SimcoreServiceLabels.model_config["json_schema_extra"][ + "examples" + ][1], exp_status_code=status.HTTP_201_CREATED, is_legacy=False, ) @@ -526,8 +576,12 @@ def test_delete_service_waiting_for_manual_intervention( [ pytest.param( *ServiceParams( - service=DynamicServiceCreate.Config.schema_extra["example"], - service_labels=SimcoreServiceLabels.Config.schema_extra["examples"][0], + service=DynamicServiceCreate.model_config["json_schema_extra"][ + "example" + ], + service_labels=SimcoreServiceLabels.model_config["json_schema_extra"][ + "examples" + ][0], exp_status_code=status.HTTP_200_OK, is_legacy=True, ), @@ -535,8 +589,12 @@ def test_delete_service_waiting_for_manual_intervention( ), pytest.param( *ServiceParams( - service=DynamicServiceCreate.Config.schema_extra["example"], - service_labels=SimcoreServiceLabels.Config.schema_extra["examples"][1], + service=DynamicServiceCreate.model_config["json_schema_extra"][ + "example" + ], + service_labels=SimcoreServiceLabels.model_config["json_schema_extra"][ + "examples" + ][1], exp_status_code=status.HTTP_200_OK, is_legacy=False, ), @@ -544,8 +602,12 @@ def test_delete_service_waiting_for_manual_intervention( ), pytest.param( *ServiceParams( - service=DynamicServiceCreate.Config.schema_extra["example"], - service_labels=SimcoreServiceLabels.Config.schema_extra["examples"][2], + service=DynamicServiceCreate.model_config["json_schema_extra"][ + "example" + ], + service_labels=SimcoreServiceLabels.model_config["json_schema_extra"][ + "examples" + ][2], exp_status_code=status.HTTP_200_OK, is_legacy=False, ), @@ -569,7 +631,8 @@ def test_retrieve( response.status_code == exp_status_code ), f"expected status code {exp_status_code}, received {response.status_code}: 
{response.text}" assert ( - response.json() == RetrieveDataOutEnveloped.Config.schema_extra["examples"][0] + response.json() + == RetrieveDataOutEnveloped.model_config["json_schema_extra"]["examples"][0] ) diff --git a/services/director-v2/tests/unit/with_dbs/test_cli.py b/services/director-v2/tests/unit/with_dbs/test_cli.py index d54008a7abc..4b8484571d0 100644 --- a/services/director-v2/tests/unit/with_dbs/test_cli.py +++ b/services/director-v2/tests/unit/with_dbs/test_cli.py @@ -106,7 +106,9 @@ def mock_get_node_state(mocker: MockerFixture) -> None: mocker.patch( "simcore_service_director_v2.cli._core._get_dy_service_state", return_value=DynamicServiceGet.model_validate( - RunningDynamicServiceDetails.Config.schema_extra["examples"][0] + RunningDynamicServiceDetails.model_config["json_schema_extra"]["examples"][ + 0 + ] ), ) diff --git a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py index 49a429bd0dd..c0749616fee 100644 --- a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py +++ b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py @@ -339,7 +339,9 @@ def service_name() -> str: @pytest.fixture( params=[ SimcoreServiceLabels.model_validate(example) - for example in SimcoreServiceLabels.Config.schema_extra["examples"] + for example in SimcoreServiceLabels.model_config["json_schema_extra"][ + "examples" + ] ], ) def labels_example(request: pytest.FixtureRequest) -> SimcoreServiceLabels: diff --git a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py index 670456e142a..5f7ea2fbb04 100644 --- a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py +++ b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py @@ -46,7 +46,9 @@ @pytest.fixture def mock_s3_settings() -> S3Settings: - return S3Settings.model_validate(S3Settings.Config.schema_extra["examples"][0]) + return S3Settings.model_validate( + S3Settings.model_config["json_schema_extra"]["examples"][0] + ) @pytest.fixture @@ -113,13 +115,15 @@ def swarm_network_id() -> str: def simcore_service_labels() -> SimcoreServiceLabels: # overwrites global fixture return SimcoreServiceLabels.model_validate( - SimcoreServiceLabels.Config.schema_extra["examples"][2] + SimcoreServiceLabels.model_config["json_schema_extra"]["examples"][2] ) @pytest.fixture def hardware_info() -> HardwareInfo: - return HardwareInfo.model_validate(HardwareInfo.Config.schema_extra["examples"][0]) + return HardwareInfo.model_validate( + HardwareInfo.model_config["json_schema_extra"]["examples"][0] + ) @pytest.fixture @@ -177,9 +181,9 @@ def expected_dynamic_sidecar_spec( "state_exclude": ["/tmp/strip_me/*", "*.py"], # noqa: S108 "state_paths": ["/tmp/save_1", "/tmp_save_2"], # noqa: S108 }, - "callbacks_mapping": CallbacksMapping.Config.schema_extra[ - "examples" - ][3], + "callbacks_mapping": CallbacksMapping.model_config[ + "json_schema_extra" + ]["examples"][3], "product_name": osparc_product_name, "project_id": "dd1d04d9-d704-4f7e-8f0f-1ca60cc771fe", "proxy_service_name": "dy-proxy_75c7f3f4-18f9-4678-8610-54a2ade78eaa", @@ -187,8 +191,12 @@ def expected_dynamic_sidecar_spec( "request_scheme": "http", "request_simcore_user_agent": request_simcore_user_agent, 
"restart_policy": "on-inputs-downloaded", - "wallet_info": WalletInfo.Config.schema_extra["examples"][0], - "pricing_info": PricingInfo.Config.schema_extra["examples"][0], + "wallet_info": WalletInfo.model_config["json_schema_extra"][ + "examples" + ][0], + "pricing_info": PricingInfo.model_config["json_schema_extra"][ + "examples" + ][0], "hardware_info": hardware_info, "service_name": "dy-sidecar_75c7f3f4-18f9-4678-8610-54a2ade78eaa", "service_port": 65534, diff --git a/services/director-v2/tests/unit/with_dbs/test_utils_dask.py b/services/director-v2/tests/unit/with_dbs/test_utils_dask.py index 9a2a93d3a33..d8d95f93572 100644 --- a/services/director-v2/tests/unit/with_dbs/test_utils_dask.py +++ b/services/director-v2/tests/unit/with_dbs/test_utils_dask.py @@ -494,7 +494,7 @@ def _add_is_directory(entry: mock._Call) -> mock._Call: # noqa: SLF001 @pytest.mark.parametrize( - "req_example", NodeRequirements.Config.schema_extra["examples"] + "req_example", NodeRequirements.model_config["json_schema_extra"]["examples"] ) def test_node_requirements_correctly_convert_to_dask_resources( req_example: dict[str, Any] diff --git a/services/dynamic-scheduler/tests/unit/api_rpc/test_api_rpc__services.py b/services/dynamic-scheduler/tests/unit/api_rpc/test_api_rpc__services.py index 379719e7297..6d2ee75b8e6 100644 --- a/services/dynamic-scheduler/tests/unit/api_rpc/test_api_rpc__services.py +++ b/services/dynamic-scheduler/tests/unit/api_rpc/test_api_rpc__services.py @@ -53,13 +53,13 @@ def node_not_found(faker: Faker) -> NodeID: @pytest.fixture def service_status_new_style() -> DynamicServiceGet: return DynamicServiceGet.model_validate( - DynamicServiceGet.Config.schema_extra["examples"][1] + DynamicServiceGet.model_config["json_schema_extra"]["examples"][1] ) @pytest.fixture def service_status_legacy() -> NodeGet: - return NodeGet.model_validate(NodeGet.Config.schema_extra["example"]) + return NodeGet.model_validate(NodeGet.model_config["json_schema_extra"]["example"]) @pytest.fixture @@ -174,7 +174,7 @@ async def test_get_state( def dynamic_service_start() -> DynamicServiceStart: # one for legacy and one for new style? 
return DynamicServiceStart.model_validate( - DynamicServiceStart.Config.schema_extra["example"] + DynamicServiceStart.model_config["json_schema_extra"]["example"] ) diff --git a/services/dynamic-sidecar/tests/conftest.py b/services/dynamic-sidecar/tests/conftest.py index 397666815fb..97760f43fdb 100644 --- a/services/dynamic-sidecar/tests/conftest.py +++ b/services/dynamic-sidecar/tests/conftest.py @@ -332,7 +332,9 @@ def mock_stop_heart_beat_task(mocker: MockerFixture) -> AsyncMock: def mock_metrics_params(faker: Faker) -> CreateServiceMetricsAdditionalParams: return parse_obj_as( CreateServiceMetricsAdditionalParams, - CreateServiceMetricsAdditionalParams.Config.schema_extra["example"], + CreateServiceMetricsAdditionalParams.model_config["json_schema_extra"][ + "example" + ], ) diff --git a/services/dynamic-sidecar/tests/unit/test_api_containers.py b/services/dynamic-sidecar/tests/unit/test_api_containers.py index 219d1336476..d8f32426cbc 100644 --- a/services/dynamic-sidecar/tests/unit/test_api_containers.py +++ b/services/dynamic-sidecar/tests/unit/test_api_containers.py @@ -267,10 +267,10 @@ def not_started_containers() -> list[str]: def mock_outputs_labels() -> dict[str, ServiceOutput]: return { "output_port_1": ServiceOutput.model_validate( - ServiceOutput.Config.schema_extra["examples"][3] + ServiceOutput.model_config["json_schema_extra"]["examples"][3] ), "output_port_2": ServiceOutput.model_validate( - ServiceOutput.Config.schema_extra["examples"][3] + ServiceOutput.model_config["json_schema_extra"]["examples"][3] ), } diff --git a/services/dynamic-sidecar/tests/unit/test_api_prometheus_metrics.py b/services/dynamic-sidecar/tests/unit/test_api_prometheus_metrics.py index 1c56b71db2e..ba7089203a5 100644 --- a/services/dynamic-sidecar/tests/unit/test_api_prometheus_metrics.py +++ b/services/dynamic-sidecar/tests/unit/test_api_prometheus_metrics.py @@ -44,7 +44,7 @@ async def enable_prometheus_metrics( monkeypatch, { "DY_SIDECAR_CALLBACKS_MAPPING": json.dumps( - CallbacksMapping.Config.schema_extra["examples"][2] + CallbacksMapping.model_config["json_schema_extra"]["examples"][2] ) }, ) diff --git a/services/payments/tests/unit/test_db_payments_methods_repo.py b/services/payments/tests/unit/test_db_payments_methods_repo.py index f64570cf598..47595bb5557 100644 --- a/services/payments/tests/unit/test_db_payments_methods_repo.py +++ b/services/payments/tests/unit/test_db_payments_methods_repo.py @@ -43,7 +43,9 @@ def app_environment( async def test_create_payments_method_annotations_workflow(app: FastAPI): - fake = PaymentsMethodsDB(**PaymentsMethodsDB.Config.schema_extra["examples"][1]) + fake = PaymentsMethodsDB( + **PaymentsMethodsDB.model_config["json_schema_extra"]["examples"][1] + ) repo = PaymentsMethodsRepo(app.state.engine) diff --git a/services/payments/tests/unit/test_db_payments_transactions_repo.py b/services/payments/tests/unit/test_db_payments_transactions_repo.py index 62e217a9e7a..d4e728d14c5 100644 --- a/services/payments/tests/unit/test_db_payments_transactions_repo.py +++ b/services/payments/tests/unit/test_db_payments_transactions_repo.py @@ -49,7 +49,7 @@ def app_environment( async def test_one_time_payment_annotations_workflow(app: FastAPI): fake = PaymentsTransactionsDB( - **PaymentsTransactionsDB.Config.schema_extra["examples"][1] + **PaymentsTransactionsDB.model_config["json_schema_extra"]["examples"][1] ) repo = PaymentsTransactionsRepo(app.state.engine) diff --git a/services/payments/tests/unit/test_services_auto_recharge_listener.py 
b/services/payments/tests/unit/test_services_auto_recharge_listener.py index 27b7883b6af..e9a39db6a97 100644 --- a/services/payments/tests/unit/test_services_auto_recharge_listener.py +++ b/services/payments/tests/unit/test_services_auto_recharge_listener.py @@ -162,7 +162,7 @@ async def mocked_pay_with_payment_method(mocker: MockerFixture) -> mock.AsyncMoc return mocker.patch( "simcore_service_payments.services.payments.PaymentsGatewayApi.pay_with_payment_method", return_value=AckPaymentWithPaymentMethod.construct( - **AckPaymentWithPaymentMethod.Config.schema_extra["example"] + **AckPaymentWithPaymentMethod.model_config["json_schema_extra"]["example"] ), ) @@ -201,7 +201,7 @@ async def get_invoice_data( product_name: ProductName, ) -> InvoiceDataGet: return InvoiceDataGet.model_validate( - InvoiceDataGet.Config.schema_extra["examples"][0] + InvoiceDataGet.model_config["json_schema_extra"]["examples"][0] ) await rpc_server.register_router(router, namespace=WEBSERVER_RPC_NAMESPACE) diff --git a/services/resource-usage-tracker/tests/unit/with_dbs/test_api_resource_tracker_pricing_plans.py b/services/resource-usage-tracker/tests/unit/with_dbs/test_api_resource_tracker_pricing_plans.py index 5e241e60767..b8c056e732a 100644 --- a/services/resource-usage-tracker/tests/unit/with_dbs/test_api_resource_tracker_pricing_plans.py +++ b/services/resource-usage-tracker/tests/unit/with_dbs/test_api_resource_tracker_pricing_plans.py @@ -75,7 +75,9 @@ def resource_tracker_pricing_tables_db(postgres_db: sa.engine.Engine) -> Iterato resource_tracker_pricing_units.insert().values( pricing_plan_id=_PRICING_PLAN_ID, unit_name="S", - unit_extra_info=UnitExtraInfo.Config.schema_extra["examples"][0], + unit_extra_info=UnitExtraInfo.model_config["json_schema_extra"][ + "examples" + ][0], default=False, specific_info={}, created=datetime.now(tz=timezone.utc), @@ -100,7 +102,9 @@ def resource_tracker_pricing_tables_db(postgres_db: sa.engine.Engine) -> Iterato resource_tracker_pricing_units.insert().values( pricing_plan_id=_PRICING_PLAN_ID, unit_name="M", - unit_extra_info=UnitExtraInfo.Config.schema_extra["examples"][0], + unit_extra_info=UnitExtraInfo.model_config["json_schema_extra"][ + "examples" + ][0], default=True, specific_info={}, created=datetime.now(tz=timezone.utc), @@ -125,7 +129,9 @@ def resource_tracker_pricing_tables_db(postgres_db: sa.engine.Engine) -> Iterato resource_tracker_pricing_units.insert().values( pricing_plan_id=_PRICING_PLAN_ID, unit_name="L", - unit_extra_info=UnitExtraInfo.Config.schema_extra["examples"][0], + unit_extra_info=UnitExtraInfo.model_config["json_schema_extra"][ + "examples" + ][0], default=False, specific_info={}, created=datetime.now(tz=timezone.utc), @@ -164,7 +170,9 @@ def resource_tracker_pricing_tables_db(postgres_db: sa.engine.Engine) -> Iterato resource_tracker_pricing_units.insert().values( pricing_plan_id=_PRICING_PLAN_ID_2, unit_name="XXL", - unit_extra_info=UnitExtraInfo.Config.schema_extra["examples"][0], + unit_extra_info=UnitExtraInfo.model_config["json_schema_extra"][ + "examples" + ][0], default=True, specific_info={}, created=datetime.now(tz=timezone.utc), diff --git a/services/resource-usage-tracker/tests/unit/with_dbs/test_api_resource_tracker_pricing_plans_rpc.py b/services/resource-usage-tracker/tests/unit/with_dbs/test_api_resource_tracker_pricing_plans_rpc.py index 5a12fd24dbe..410b1be2e40 100644 --- a/services/resource-usage-tracker/tests/unit/with_dbs/test_api_resource_tracker_pricing_plans_rpc.py +++ 
b/services/resource-usage-tracker/tests/unit/with_dbs/test_api_resource_tracker_pricing_plans_rpc.py @@ -160,7 +160,9 @@ async def test_rpc_pricing_plans_with_units_workflow( data=PricingUnitWithCostCreate( pricing_plan_id=_pricing_plan_id, unit_name="SMALL", - unit_extra_info=UnitExtraInfo.Config.schema_extra["examples"][0], + unit_extra_info=UnitExtraInfo.model_config["json_schema_extra"]["examples"][ + 0 + ], default=True, specific_info=SpecificInfo(aws_ec2_instances=[]), cost_per_unit=Decimal(10), @@ -192,7 +194,9 @@ async def test_rpc_pricing_plans_with_units_workflow( pricing_plan_id=_pricing_plan_id, pricing_unit_id=_first_pricing_unit_id, unit_name=_unit_name, - unit_extra_info=UnitExtraInfo.Config.schema_extra["examples"][0], + unit_extra_info=UnitExtraInfo.model_config["json_schema_extra"]["examples"][ + 0 + ], default=True, specific_info=SpecificInfo(aws_ec2_instances=[]), pricing_unit_cost_update=None, @@ -211,7 +215,9 @@ async def test_rpc_pricing_plans_with_units_workflow( pricing_plan_id=_pricing_plan_id, pricing_unit_id=_first_pricing_unit_id, unit_name="MEDIUM", - unit_extra_info=UnitExtraInfo.Config.schema_extra["examples"][0], + unit_extra_info=UnitExtraInfo.model_config["json_schema_extra"]["examples"][ + 0 + ], default=True, specific_info=SpecificInfo(aws_ec2_instances=[]), pricing_unit_cost_update=PricingUnitCostUpdate( @@ -242,7 +248,9 @@ async def test_rpc_pricing_plans_with_units_workflow( data=PricingUnitWithCostCreate( pricing_plan_id=_pricing_plan_id, unit_name="LARGE", - unit_extra_info=UnitExtraInfo.Config.schema_extra["examples"][0], + unit_extra_info=UnitExtraInfo.model_config["json_schema_extra"]["examples"][ + 0 + ], default=False, specific_info=SpecificInfo(aws_ec2_instances=[]), cost_per_unit=Decimal(20), diff --git a/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_triggered_by_listening_with_billing.py b/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_triggered_by_listening_with_billing.py index 8d95ae78d75..b250f0d654f 100644 --- a/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_triggered_by_listening_with_billing.py +++ b/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_triggered_by_listening_with_billing.py @@ -57,7 +57,9 @@ def resource_tracker_pricing_tables_db(postgres_db: sa.engine.Engine) -> Iterato resource_tracker_pricing_units.insert().values( pricing_plan_id=1, unit_name="S", - unit_extra_info=UnitExtraInfo.Config.schema_extra["examples"][0], + unit_extra_info=UnitExtraInfo.model_config["json_schema_extra"][ + "examples" + ][0], default=False, specific_info={}, created=datetime.now(tz=timezone.utc), @@ -82,7 +84,9 @@ def resource_tracker_pricing_tables_db(postgres_db: sa.engine.Engine) -> Iterato resource_tracker_pricing_units.insert().values( pricing_plan_id=1, unit_name="M", - unit_extra_info=UnitExtraInfo.Config.schema_extra["examples"][0], + unit_extra_info=UnitExtraInfo.model_config["json_schema_extra"][ + "examples" + ][0], default=True, specific_info={}, created=datetime.now(tz=timezone.utc), @@ -107,7 +111,9 @@ def resource_tracker_pricing_tables_db(postgres_db: sa.engine.Engine) -> Iterato resource_tracker_pricing_units.insert().values( pricing_plan_id=1, unit_name="L", - unit_extra_info=UnitExtraInfo.Config.schema_extra["examples"][0], + unit_extra_info=UnitExtraInfo.model_config["json_schema_extra"][ + "examples" + ][0], default=False, specific_info={}, created=datetime.now(tz=timezone.utc), diff 
--git a/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_with_billing.py b/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_with_billing.py index 92946509e91..eedb6d11c61 100644 --- a/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_with_billing.py +++ b/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_with_billing.py @@ -71,7 +71,9 @@ def resource_tracker_pricing_tables_db(postgres_db: sa.engine.Engine) -> Iterato resource_tracker_pricing_units.insert().values( pricing_plan_id=1, unit_name="S", - unit_extra_info=UnitExtraInfo.Config.schema_extra["examples"][0], + unit_extra_info=UnitExtraInfo.model_config["json_schema_extra"][ + "examples" + ][0], default=False, specific_info={}, created=datetime.now(tz=timezone.utc), @@ -96,7 +98,9 @@ def resource_tracker_pricing_tables_db(postgres_db: sa.engine.Engine) -> Iterato resource_tracker_pricing_units.insert().values( pricing_plan_id=1, unit_name="M", - unit_extra_info=UnitExtraInfo.Config.schema_extra["examples"][0], + unit_extra_info=UnitExtraInfo.model_config["json_schema_extra"][ + "examples" + ][0], default=True, specific_info={}, created=datetime.now(tz=timezone.utc), @@ -121,7 +125,9 @@ def resource_tracker_pricing_tables_db(postgres_db: sa.engine.Engine) -> Iterato resource_tracker_pricing_units.insert().values( pricing_plan_id=1, unit_name="L", - unit_extra_info=UnitExtraInfo.Config.schema_extra["examples"][0], + unit_extra_info=UnitExtraInfo.model_config["json_schema_extra"][ + "examples" + ][0], default=False, specific_info={}, created=datetime.now(tz=timezone.utc), diff --git a/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_with_billing_cost_0.py b/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_with_billing_cost_0.py index d5bc497fb0f..7522d751d55 100644 --- a/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_with_billing_cost_0.py +++ b/services/resource-usage-tracker/tests/unit/with_dbs/test_process_rabbitmq_message_with_billing_cost_0.py @@ -67,7 +67,9 @@ def resource_tracker_pricing_tables_db(postgres_db: sa.engine.Engine) -> Iterato resource_tracker_pricing_units.insert().values( pricing_plan_id=1, unit_name="S", - unit_extra_info=UnitExtraInfo.Config.schema_extra["examples"][0], + unit_extra_info=UnitExtraInfo.model_config["json_schema_extra"][ + "examples" + ][0], default=False, specific_info={}, created=datetime.now(tz=timezone.utc), diff --git a/services/web/server/tests/unit/isolated/test_dynamic_scheduler.py b/services/web/server/tests/unit/isolated/test_dynamic_scheduler.py index a770fd46f48..e93a9ac6a6a 100644 --- a/services/web/server/tests/unit/isolated/test_dynamic_scheduler.py +++ b/services/web/server/tests/unit/isolated/test_dynamic_scheduler.py @@ -48,17 +48,19 @@ def mock_rpc_client( @pytest.fixture def dynamic_service_start() -> DynamicServiceStart: return DynamicServiceStart.model_validate( - DynamicServiceStart.Config.schema_extra["example"] + DynamicServiceStart.model_config["json_schema_extra"]["example"] ) @pytest.mark.parametrize( "expected_response", [ - NodeGet.model_validate(NodeGet.Config.schema_extra["example"]), - NodeGetIdle.model_validate(NodeGetIdle.Config.schema_extra["example"]), + NodeGet.model_validate(NodeGet.model_config["json_schema_extra"]["example"]), + NodeGetIdle.model_validate( + NodeGetIdle.model_config["json_schema_extra"]["example"] + ), 
DynamicServiceGet.model_validate( - DynamicServiceGet.Config.schema_extra["examples"][0] + DynamicServiceGet.model_config["json_schema_extra"]["examples"][0] ), ], ) @@ -98,9 +100,9 @@ async def test_get_service_status_raises_rpc_server_error( @pytest.mark.parametrize( "expected_response", [ - NodeGet.model_validate(NodeGet.Config.schema_extra["example"]), + NodeGet.model_validate(NodeGet.model_config["json_schema_extra"]["example"]), DynamicServiceGet.model_validate( - DynamicServiceGet.Config.schema_extra["examples"][0] + DynamicServiceGet.model_config["json_schema_extra"]["examples"][0] ), ], ) diff --git a/services/web/server/tests/unit/isolated/test_garbage_collector_core.py b/services/web/server/tests/unit/isolated/test_garbage_collector_core.py index 7226a4001f8..10bfd794f69 100644 --- a/services/web/server/tests/unit/isolated/test_garbage_collector_core.py +++ b/services/web/server/tests/unit/isolated/test_garbage_collector_core.py @@ -123,7 +123,7 @@ async def test_remove_orphaned_services_with_no_running_services_does_nothing( def faker_dynamic_service_get() -> Callable[[], DynamicServiceGet]: def _() -> DynamicServiceGet: return DynamicServiceGet.model_validate( - DynamicServiceGet.Config.schema_extra["examples"][1] + DynamicServiceGet.model_config["json_schema_extra"]["examples"][1] ) return _ diff --git a/services/web/server/tests/unit/isolated/test_products_model.py b/services/web/server/tests/unit/isolated/test_products_model.py index b78ad59ba5a..c435a0a1bbb 100644 --- a/services/web/server/tests/unit/isolated/test_products_model.py +++ b/services/web/server/tests/unit/isolated/test_products_model.py @@ -34,13 +34,17 @@ def test_product_examples( def test_product_to_static(): - product = Product.model_validate(Product.Config.schema_extra["examples"][0]) + product = Product.model_validate( + Product.model_config["json_schema_extra"]["examples"][0] + ) assert product.to_statics() == { "displayName": "o²S²PARC", "supportEmail": "support@osparc.io", } - product = Product.model_validate(Product.Config.schema_extra["examples"][2]) + product = Product.model_validate( + Product.model_config["json_schema_extra"]["examples"][2] + ) assert product.to_statics() == { "displayName": "o²S²PARC FOO", diff --git a/services/web/server/tests/unit/isolated/test_projects__nodes_resources.py b/services/web/server/tests/unit/isolated/test_projects__nodes_resources.py index 12f6bfc23b4..259c4ba0c3f 100644 --- a/services/web/server/tests/unit/isolated/test_projects__nodes_resources.py +++ b/services/web/server/tests/unit/isolated/test_projects__nodes_resources.py @@ -18,7 +18,9 @@ "resources", [ parse_obj_as(ServiceResourcesDict, example) - for example in ServiceResourcesDictHelpers.Config.schema_extra["examples"] + for example in ServiceResourcesDictHelpers.model_config["json_schema_extra"][ + "examples" + ] ], ) def test_check_can_update_service_resources_with_same_does_not_raise( @@ -32,7 +34,9 @@ def test_check_can_update_service_resources_with_same_does_not_raise( "resources", [ parse_obj_as(ServiceResourcesDict, example) - for example in ServiceResourcesDictHelpers.Config.schema_extra["examples"] + for example in ServiceResourcesDictHelpers.model_config["json_schema_extra"][ + "examples" + ] ], ) def test_check_can_update_service_resources_with_invalid_container_name_raises( @@ -51,7 +55,9 @@ def test_check_can_update_service_resources_with_invalid_container_name_raises( "resources", [ parse_obj_as(ServiceResourcesDict, example) - for example in 
ServiceResourcesDictHelpers.Config.schema_extra["examples"] + for example in ServiceResourcesDictHelpers.model_config["json_schema_extra"][ + "examples" + ] ], ) def test_check_can_update_service_resources_with_invalid_image_name_raises( diff --git a/services/web/server/tests/unit/isolated/test_studies_dispatcher_settings.py b/services/web/server/tests/unit/isolated/test_studies_dispatcher_settings.py index 91364e64beb..e2255bbcf26 100644 --- a/services/web/server/tests/unit/isolated/test_studies_dispatcher_settings.py +++ b/services/web/server/tests/unit/isolated/test_studies_dispatcher_settings.py @@ -21,7 +21,7 @@ def environment(monkeypatch: pytest.MonkeyPatch) -> EnvVarsDict: envs = setenvs_from_dict( monkeypatch, - envs=StudiesDispatcherSettings.Config.schema_extra["example"], + envs=StudiesDispatcherSettings.model_config["json_schema_extra"]["example"], ) return envs diff --git a/services/web/server/tests/unit/isolated/test_user_notifications.py b/services/web/server/tests/unit/isolated/test_user_notifications.py index ff53b0aa4fe..3caca391881 100644 --- a/services/web/server/tests/unit/isolated/test_user_notifications.py +++ b/services/web/server/tests/unit/isolated/test_user_notifications.py @@ -12,7 +12,9 @@ ) -@pytest.mark.parametrize("raw_data", UserNotification.Config.schema_extra["examples"]) +@pytest.mark.parametrize( + "raw_data", UserNotification.model_config["json_schema_extra"]["examples"] +) def test_user_notification(raw_data: dict[str, Any]): assert UserNotification.model_validate(raw_data) diff --git a/services/web/server/tests/unit/isolated/test_users_models.py b/services/web/server/tests/unit/isolated/test_users_models.py index 474cb2f1df9..8f3b45cd3df 100644 --- a/services/web/server/tests/unit/isolated/test_users_models.py +++ b/services/web/server/tests/unit/isolated/test_users_models.py @@ -81,7 +81,7 @@ def test_auto_compute_gravatar(faker: Faker): @pytest.mark.parametrize("user_role", [u.name for u in UserRole]) def test_profile_get_role(user_role: str): - for example in ProfileGet.Config.schema_extra["examples"]: + for example in ProfileGet.model_config["json_schema_extra"]["examples"]: data = deepcopy(example) data["role"] = user_role m1 = ProfileGet(**data) diff --git a/services/web/server/tests/unit/with_dbs/01/clusters/test_clusters_handlers.py b/services/web/server/tests/unit/with_dbs/01/clusters/test_clusters_handlers.py index a0cd6542b53..510b3118dca 100644 --- a/services/web/server/tests/unit/with_dbs/01/clusters/test_clusters_handlers.py +++ b/services/web/server/tests/unit/with_dbs/01/clusters/test_clusters_handlers.py @@ -46,18 +46,18 @@ def mocked_director_v2_api(mocker: MockerFixture): ) mocked_director_v2_api.create_cluster.return_value = Cluster.model_validate( - random.choice(Cluster.Config.schema_extra["examples"]) + random.choice(Cluster.model_config["json_schema_extra"]["examples"]) ) mocked_director_v2_api.list_clusters.return_value = [] mocked_director_v2_api.get_cluster.return_value = Cluster.model_validate( - random.choice(Cluster.Config.schema_extra["examples"]) + random.choice(Cluster.model_config["json_schema_extra"]["examples"]) ) mocked_director_v2_api.get_cluster_details.return_value = { "scheduler": {"status": "running"}, "dashboardLink": "https://link.to.dashboard", } mocked_director_v2_api.update_cluster.return_value = Cluster.model_validate( - random.choice(Cluster.Config.schema_extra["examples"]) + random.choice(Cluster.model_config["json_schema_extra"]["examples"]) ) mocked_director_v2_api.delete_cluster.return_value = None 
mocked_director_v2_api.ping_cluster.return_value = None diff --git a/services/web/server/tests/unit/with_dbs/01/test_catalog_handlers__pricing_plan.py b/services/web/server/tests/unit/with_dbs/01/test_catalog_handlers__pricing_plan.py index b328ddc4c7d..f6f683d0c37 100644 --- a/services/web/server/tests/unit/with_dbs/01/test_catalog_handlers__pricing_plan.py +++ b/services/web/server/tests/unit/with_dbs/01/test_catalog_handlers__pricing_plan.py @@ -32,7 +32,7 @@ def mock_rut_api_responses( service_pricing_plan_get = parse_obj_as( PricingPlanGet, - PricingPlanGet.Config.schema_extra["examples"][0], + PricingPlanGet.model_config["json_schema_extra"]["examples"][0], ) aioresponses_mocker.get( re.compile(f"^{settings.api_base_url}/services/+.+$"), diff --git a/services/web/server/tests/unit/with_dbs/01/test_catalog_handlers__services.py b/services/web/server/tests/unit/with_dbs/01/test_catalog_handlers__services.py index 396e3a1f8a0..38d10825512 100644 --- a/services/web/server/tests/unit/with_dbs/01/test_catalog_handlers__services.py +++ b/services/web/server/tests/unit/with_dbs/01/test_catalog_handlers__services.py @@ -56,7 +56,8 @@ async def _list( assert user_id items = parse_obj_as( - list[ServiceGetV2], ServiceGetV2.Config.schema_extra["examples"] + list[ServiceGetV2], + ServiceGetV2.model_config["json_schema_extra"]["examples"], ) total_count = len(items) @@ -80,7 +81,7 @@ async def _get( assert user_id got = parse_obj_as( - ServiceGetV2, ServiceGetV2.Config.schema_extra["examples"][0] + ServiceGetV2, ServiceGetV2.model_config["json_schema_extra"]["examples"][0] ) got.version = service_version got.key = service_key @@ -101,7 +102,7 @@ async def _update( assert user_id got = parse_obj_as( - ServiceGetV2, ServiceGetV2.Config.schema_extra["examples"][0] + ServiceGetV2, ServiceGetV2.model_config["json_schema_extra"]["examples"][0] ) got.version = service_version got.key = service_key diff --git a/services/web/server/tests/unit/with_dbs/01/test_catalog_handlers__services_resources.py b/services/web/server/tests/unit/with_dbs/01/test_catalog_handlers__services_resources.py index afffca3652a..955a8cb8dd0 100644 --- a/services/web/server/tests/unit/with_dbs/01/test_catalog_handlers__services_resources.py +++ b/services/web/server/tests/unit/with_dbs/01/test_catalog_handlers__services_resources.py @@ -34,7 +34,7 @@ def mock_catalog_service_api_responses( service_resources = parse_obj_as( ServiceResourcesDict, - ServiceResourcesDictHelpers.Config.schema_extra["examples"][0], + ServiceResourcesDictHelpers.model_config["json_schema_extra"]["examples"][0], ) jsonable_service_resources = ServiceResourcesDictHelpers.create_jsonable( service_resources diff --git a/services/web/server/tests/unit/with_dbs/02/conftest.py b/services/web/server/tests/unit/with_dbs/02/conftest.py index 425756375b1..e33fe523296 100644 --- a/services/web/server/tests/unit/with_dbs/02/conftest.py +++ b/services/web/server/tests/unit/with_dbs/02/conftest.py @@ -41,7 +41,7 @@ def mock_service_resources() -> ServiceResourcesDict: return parse_obj_as( ServiceResourcesDict, - ServiceResourcesDictHelpers.Config.schema_extra["examples"][0], + ServiceResourcesDictHelpers.model_config["json_schema_extra"]["examples"][0], ) diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handler.py b/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handler.py index da875127b26..a5b5ad3abdd 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handler.py +++ 
b/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handler.py @@ -77,7 +77,9 @@ async def test_get_node_resources( assert DEFAULT_SINGLE_SERVICE_NAME in node_resources assert ( node_resources - == ServiceResourcesDictHelpers.Config.schema_extra["examples"][0] + == ServiceResourcesDictHelpers.model_config["json_schema_extra"][ + "examples" + ][0] ) else: assert not data @@ -145,7 +147,9 @@ async def test_replace_node_resources_is_forbidden_by_default( response = await client.put( f"{url}", json=ServiceResourcesDictHelpers.create_jsonable( - ServiceResourcesDictHelpers.Config.schema_extra["examples"][0] + ServiceResourcesDictHelpers.model_config["json_schema_extra"][ + "examples" + ][0] ), ) data, error = await assert_status(response, expected) @@ -156,7 +160,9 @@ async def test_replace_node_resources_is_forbidden_by_default( assert DEFAULT_SINGLE_SERVICE_NAME in node_resources assert ( node_resources - == ServiceResourcesDictHelpers.Config.schema_extra["examples"][0] + == ServiceResourcesDictHelpers.model_config["json_schema_extra"][ + "examples" + ][0] ) @@ -183,7 +189,9 @@ async def test_replace_node_resources_is_ok_if_explicitly_authorized( response = await client.put( f"{url}", json=ServiceResourcesDictHelpers.create_jsonable( - ServiceResourcesDictHelpers.Config.schema_extra["examples"][0] + ServiceResourcesDictHelpers.model_config["json_schema_extra"][ + "examples" + ][0] ), ) data, error = await assert_status(response, expected) @@ -194,7 +202,9 @@ async def test_replace_node_resources_is_ok_if_explicitly_authorized( assert DEFAULT_SINGLE_SERVICE_NAME in node_resources assert ( node_resources - == ServiceResourcesDictHelpers.Config.schema_extra["examples"][0] + == ServiceResourcesDictHelpers.model_config["json_schema_extra"][ + "examples" + ][0] ) @@ -218,7 +228,9 @@ async def test_replace_node_resources_raises_422_if_resource_does_not_validate( f"{url}", json=ServiceResourcesDictHelpers.create_jsonable( # NOTE: we apply a different resource set - ServiceResourcesDictHelpers.Config.schema_extra["examples"][1] + ServiceResourcesDictHelpers.model_config["json_schema_extra"][ + "examples" + ][1] ), ) await assert_status(response, expected) @@ -384,7 +396,7 @@ def num_services( ) -> list[DynamicServiceGet]: # noqa: ARG002 return [ DynamicServiceGet.model_validate( - DynamicServiceGet.Config.schema_extra["examples"][1] + DynamicServiceGet.model_config["json_schema_extra"]["examples"][1] | {"service_uuid": service_uuid, "project_id": user_project["uuid"]} ) for service_uuid in self.running_services_uuids diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_pricing_unit_handlers.py b/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_pricing_unit_handlers.py index 06957402de2..f51877302b6 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_pricing_unit_handlers.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_pricing_unit_handlers.py @@ -99,7 +99,7 @@ def mock_rut_api_responses( settings: ResourceUsageTrackerSettings = get_plugin_settings(client.app) pricing_unit_get_base = parse_obj_as( - PricingUnitGet, PricingUnitGet.Config.schema_extra["examples"][0] + PricingUnitGet, PricingUnitGet.model_config["json_schema_extra"]["examples"][0] ) pricing_unit_get_1 = pricing_unit_get_base.copy() pricing_unit_get_1.pricing_unit_id = _PRICING_UNIT_ID_1 diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py 
b/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py index d3945674158..ee92c9fda05 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py @@ -1037,7 +1037,7 @@ async def test_project_node_lifetime( # noqa: PLR0915 project_id=user_project["uuid"], node_id=dynamic_node_id ) - node_sample = deepcopy(NodeGet.Config.schema_extra["example"]) + node_sample = deepcopy(NodeGet.model_config["json_schema_extra"]["example"]) mocked_director_v2_api[ "dynamic_scheduler.api.get_dynamic_service" ].return_value = NodeGet.model_validate( diff --git a/services/web/server/tests/unit/with_dbs/03/login/test_login_registration_handlers.py b/services/web/server/tests/unit/with_dbs/03/login/test_login_registration_handlers.py index 8e076f89513..b90369a3c14 100644 --- a/services/web/server/tests/unit/with_dbs/03/login/test_login_registration_handlers.py +++ b/services/web/server/tests/unit/with_dbs/03/login/test_login_registration_handlers.py @@ -168,7 +168,7 @@ async def test_request_an_account( assert client.app # A form similar to the one in https://github.com/ITISFoundation/osparc-simcore/pull/5378 user_data = { - **AccountRequestInfo.Config.schema_extra["example"]["form"], + **AccountRequestInfo.model_config["json_schema_extra"]["example"]["form"], # fields required in the form "firstName": faker.first_name(), "lastName": faker.last_name(), diff --git a/services/web/server/tests/unit/with_dbs/03/resource_usage/test_admin_pricing_plans.py b/services/web/server/tests/unit/with_dbs/03/resource_usage/test_admin_pricing_plans.py index 6e67883e357..04b30407526 100644 --- a/services/web/server/tests/unit/with_dbs/03/resource_usage/test_admin_pricing_plans.py +++ b/services/web/server/tests/unit/with_dbs/03/resource_usage/test_admin_pricing_plans.py @@ -36,7 +36,8 @@ def mock_rpc_resource_usage_tracker_service_api( autospec=True, return_value=[ parse_obj_as( - PricingPlanGet, PricingPlanGet.Config.schema_extra["examples"][0] + PricingPlanGet, + PricingPlanGet.model_config["json_schema_extra"]["examples"][0], ) ], ), @@ -44,21 +45,24 @@ def mock_rpc_resource_usage_tracker_service_api( "simcore_service_webserver.resource_usage._pricing_plans_admin_api.pricing_plans.get_pricing_plan", autospec=True, return_value=parse_obj_as( - PricingPlanGet, PricingPlanGet.Config.schema_extra["examples"][0] + PricingPlanGet, + PricingPlanGet.model_config["json_schema_extra"]["examples"][0], ), ), "create_pricing_plan": mocker.patch( "simcore_service_webserver.resource_usage._pricing_plans_admin_api.pricing_plans.create_pricing_plan", autospec=True, return_value=parse_obj_as( - PricingPlanGet, PricingPlanGet.Config.schema_extra["examples"][0] + PricingPlanGet, + PricingPlanGet.model_config["json_schema_extra"]["examples"][0], ), ), "update_pricing_plan": mocker.patch( "simcore_service_webserver.resource_usage._pricing_plans_admin_api.pricing_plans.update_pricing_plan", autospec=True, return_value=parse_obj_as( - PricingPlanGet, PricingPlanGet.Config.schema_extra["examples"][0] + PricingPlanGet, + PricingPlanGet.model_config["json_schema_extra"]["examples"][0], ), ), ## Pricing units @@ -66,21 +70,24 @@ def mock_rpc_resource_usage_tracker_service_api( "simcore_service_webserver.resource_usage._pricing_plans_admin_api.pricing_units.get_pricing_unit", autospec=True, return_value=parse_obj_as( - PricingUnitGet, PricingUnitGet.Config.schema_extra["examples"][0] + PricingUnitGet, + 
PricingUnitGet.model_config["json_schema_extra"]["examples"][0], ), ), "create_pricing_unit": mocker.patch( "simcore_service_webserver.resource_usage._pricing_plans_admin_api.pricing_units.create_pricing_unit", autospec=True, return_value=parse_obj_as( - PricingUnitGet, PricingUnitGet.Config.schema_extra["examples"][0] + PricingUnitGet, + PricingUnitGet.model_config["json_schema_extra"]["examples"][0], ), ), "update_pricing_unit": mocker.patch( "simcore_service_webserver.resource_usage._pricing_plans_admin_api.pricing_units.update_pricing_unit", autospec=True, return_value=parse_obj_as( - PricingUnitGet, PricingUnitGet.Config.schema_extra["examples"][0] + PricingUnitGet, + PricingUnitGet.model_config["json_schema_extra"]["examples"][0], ), ), ## Pricing plan to service @@ -90,7 +97,9 @@ def mock_rpc_resource_usage_tracker_service_api( return_value=[ parse_obj_as( PricingPlanToServiceGet, - PricingPlanToServiceGet.Config.schema_extra["examples"][0], + PricingPlanToServiceGet.model_config["json_schema_extra"][ + "examples" + ][0], ) ], ), @@ -99,7 +108,9 @@ def mock_rpc_resource_usage_tracker_service_api( autospec=True, return_value=parse_obj_as( PricingPlanToServiceGet, - PricingPlanToServiceGet.Config.schema_extra["examples"][0], + PricingPlanToServiceGet.model_config["json_schema_extra"]["examples"][ + 0 + ], ), ), } diff --git a/services/web/server/tests/unit/with_dbs/03/resource_usage/test_pricing_plans.py b/services/web/server/tests/unit/with_dbs/03/resource_usage/test_pricing_plans.py index 7b25e33a799..e48d461ad26 100644 --- a/services/web/server/tests/unit/with_dbs/03/resource_usage/test_pricing_plans.py +++ b/services/web/server/tests/unit/with_dbs/03/resource_usage/test_pricing_plans.py @@ -33,12 +33,12 @@ def mock_rut_api_responses( settings: ResourceUsageTrackerSettings = get_plugin_settings(client.app) pricing_unit_get = parse_obj_as( - PricingUnitGet, PricingUnitGet.Config.schema_extra["examples"][0] + PricingUnitGet, PricingUnitGet.model_config["json_schema_extra"]["examples"][0] ) service_pricing_plan_get = parse_obj_as( PricingPlanGet, - PricingPlanGet.Config.schema_extra["examples"][0], + PricingPlanGet.model_config["json_schema_extra"]["examples"][0], ) aioresponses_mocker.get( diff --git a/services/web/server/tests/unit/with_dbs/03/test_users.py b/services/web/server/tests/unit/with_dbs/03/test_users.py index e1f66068421..606a8b2103f 100644 --- a/services/web/server/tests/unit/with_dbs/03/test_users.py +++ b/services/web/server/tests/unit/with_dbs/03/test_users.py @@ -248,7 +248,9 @@ def account_request_form(faker: Faker) -> dict[str, Any]: } # keeps in sync fields from example and this fixture - assert set(form) == set(AccountRequestInfo.Config.schema_extra["example"]["form"]) + assert set(form) == set( + AccountRequestInfo.model_config["json_schema_extra"]["example"]["form"] + ) return form From 8d0acc226f2da4a6c4e16c2ab0fb0ad345e8360d Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 1 Oct 2024 16:23:25 +0200 Subject: [PATCH 165/280] upgrade requirements --- services/catalog/requirements/_base.txt | 48 ++++++++++++++---------- services/catalog/requirements/_test.txt | 15 -------- services/catalog/requirements/_tools.txt | 10 ----- 3 files changed, 28 insertions(+), 45 deletions(-) diff --git a/services/catalog/requirements/_base.txt b/services/catalog/requirements/_base.txt index cf3adce5de0..da1c1fd18ef 100644 --- a/services/catalog/requirements/_base.txt +++ b/services/catalog/requirements/_base.txt @@ -24,6 +24,8 @@ aiosignal==1.3.1 # via aiohttp 
alembic==1.13.1 # via -r requirements/../../../packages/postgres-database/requirements/_base.in +annotated-types==0.7.0 + # via pydantic anyio==4.3.0 # via # fast-depends @@ -37,10 +39,7 @@ arrow==1.3.0 # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in async-timeout==4.0.3 - # via - # aiohttp - # asyncpg - # redis + # via asyncpg asyncpg==0.29.0 # via # -r requirements/_base.in @@ -71,22 +70,15 @@ email-validator==2.1.1 # via # fastapi # pydantic -exceptiongroup==1.2.0 - # via anyio fast-depends==2.4.2 # via faststream -fastapi==0.99.1 +fastapi==0.115.0 # via - # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in # prometheus-fastapi-instrumentator +fastapi-cli==0.0.5 + # via fastapi faststream==0.5.10 # via -r requirements/../../../packages/service-library/requirements/_base.in frozenlist==1.4.1 @@ -187,13 +179,12 @@ prometheus-fastapi-instrumentator==6.1.0 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in psycopg2-binary==2.9.9 # via sqlalchemy -pydantic==1.10.14 +pydantic==2.9.2 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt @@ -206,6 +197,22 @@ pydantic==1.10.14 # -r requirements/_base.in # fast-depends # fastapi + # pydantic-extra-types + # pydantic-settings +pydantic-core==2.23.4 + # via pydantic +pydantic-extra-types==2.9.0 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # fastapi +pydantic-settings==2.5.2 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r 
requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in + # fastapi pygments==2.17.2 # via rich pyinstrument==4.6.2 @@ -214,7 +221,7 @@ python-dateutil==2.9.0.post0 # via arrow python-dotenv==1.0.1 # via - # pydantic + # pydantic-settings # uvicorn python-multipart==0.0.9 # via fastapi @@ -275,7 +282,7 @@ sqlalchemy==1.4.52 # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/postgres-database/requirements/_base.in # alembic -starlette==0.27.0 +starlette==0.38.6 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt @@ -297,6 +304,7 @@ typer==0.12.3 # via # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in + # fastapi-cli # faststream types-python-dateutil==2.9.0.20240316 # via arrow @@ -305,12 +313,11 @@ typing-extensions==4.10.0 # aiodebug # aiodocker # alembic - # anyio # fastapi # faststream # pydantic + # pydantic-core # typer - # uvicorn ujson==5.9.0 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -325,6 +332,7 @@ uvicorn==0.29.0 # via # -r requirements/../../../packages/service-library/requirements/_fastapi.in # fastapi + # fastapi-cli uvloop==0.19.0 # via uvicorn watchfiles==0.21.0 diff --git a/services/catalog/requirements/_test.txt b/services/catalog/requirements/_test.txt index ee25905a2e3..acad0f2b15d 100644 --- a/services/catalog/requirements/_test.txt +++ b/services/catalog/requirements/_test.txt @@ -17,10 +17,6 @@ anyio==4.3.0 # httpx asgi-lifespan==2.1.0 # via -r requirements/_test.in -async-timeout==4.0.3 - # via - # -c requirements/_base.txt - # aiohttp attrs==23.2.0 # via # -c requirements/_base.txt @@ -45,11 +41,6 @@ coverage==7.6.1 # via pytest-cov docker==7.1.0 # via -r requirements/_test.in -exceptiongroup==1.2.0 - # via - # -c requirements/_base.txt - # anyio - # pytest faker==27.0.0 # via -r requirements/_test.in frozenlist==1.4.1 @@ -180,11 +171,6 @@ sqlalchemy==1.4.52 # alembic sqlalchemy2-stubs==0.0.2a38 # via sqlalchemy -tomli==2.0.1 - # via - # coverage - # mypy - # pytest types-psycopg2==2.9.21.20240819 # via -r requirements/_test.in types-pyyaml==6.0.12.20240808 @@ -193,7 +179,6 @@ typing-extensions==4.10.0 # via # -c requirements/_base.txt # alembic - # anyio # mypy # sqlalchemy2-stubs urllib3==2.2.2 diff --git a/services/catalog/requirements/_tools.txt b/services/catalog/requirements/_tools.txt index bc04c03d0dd..e99f6785e71 100644 --- a/services/catalog/requirements/_tools.txt +++ b/services/catalog/requirements/_tools.txt @@ -73,22 +73,12 @@ ruff==0.6.1 # via -r requirements/../../../requirements/devenv.txt setuptools==73.0.1 # via pip-tools -tomli==2.0.1 - # via - # -c requirements/_test.txt - # black - # build - # mypy - # pip-tools - # pylint tomlkit==0.13.2 # via pylint typing-extensions==4.10.0 # via # -c requirements/_base.txt # -c requirements/_test.txt - # astroid - # black # mypy virtualenv==20.26.3 # via pre-commit From e6c95760ae9739050007c7b83d4a5956741041f5 Mon Sep 17 00:00:00 2001 From: 
Giancarlo Romeo Date: Tue, 1 Oct 2024 16:24:40 +0200 Subject: [PATCH 166/280] upgrade requirements --- .../clusters-keeper/requirements/_base.txt | 53 ++++++++++--------- .../clusters-keeper/requirements/_test.txt | 31 +++++------ .../clusters-keeper/requirements/_tools.txt | 10 ---- 3 files changed, 42 insertions(+), 52 deletions(-) diff --git a/services/clusters-keeper/requirements/_base.txt b/services/clusters-keeper/requirements/_base.txt index 5c2cff64acf..2b7f7c6b234 100644 --- a/services/clusters-keeper/requirements/_base.txt +++ b/services/clusters-keeper/requirements/_base.txt @@ -43,6 +43,8 @@ aiormq==6.8.0 # via aio-pika aiosignal==1.3.1 # via aiohttp +annotated-types==0.7.0 + # via pydantic anyio==4.3.0 # via # fast-depends @@ -54,13 +56,10 @@ arrow==1.3.0 # -r requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -async-timeout==4.0.3 - # via - # aiohttp - # redis attrs==23.2.0 # via # aiohttp @@ -116,24 +115,10 @@ dnspython==2.6.1 # via email-validator email-validator==2.1.1 # via pydantic -exceptiongroup==1.2.1 - # via anyio fast-depends==2.4.2 # via faststream -fastapi==0.99.1 +fastapi==0.115.0 # via - # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in # 
prometheus-fastapi-instrumentator @@ -269,7 +254,7 @@ psutil==5.9.8 # via # -c requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt # distributed -pydantic==1.10.15 +pydantic==2.9.2 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -280,7 +265,6 @@ pydantic==1.10.15 # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt @@ -297,6 +281,26 @@ pydantic==1.10.15 # -r requirements/../../../packages/settings-library/requirements/_base.in # fast-depends # fastapi + # pydantic-extra-types + # pydantic-settings +pydantic-core==2.23.4 + # via pydantic +pydantic-extra-types==2.9.0 + # via + # -r requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in +pydantic-settings==2.5.2 + # via + # -r requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in pygments==2.18.0 # via rich pyinstrument==4.6.2 @@ -307,6 +311,8 @@ python-dateutil==2.9.0.post0 # via # arrow # botocore +python-dotenv==1.0.1 + # via pydantic-settings pyyaml==6.0.1 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -375,7 +381,7 @@ sortedcontainers==2.4.0 # via # -c 
requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt # distributed -starlette==0.27.0 +starlette==0.38.6 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -437,16 +443,15 @@ typing-extensions==4.11.0 # via # aiodebug # aiodocker - # anyio # fastapi # faststream # pydantic + # pydantic-core # typer # types-aiobotocore # types-aiobotocore-ec2 # types-aiobotocore-s3 # types-aiobotocore-ssm - # uvicorn urllib3==2.2.1 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt diff --git a/services/clusters-keeper/requirements/_test.txt b/services/clusters-keeper/requirements/_test.txt index 3539d24ebad..491a8027fd1 100644 --- a/services/clusters-keeper/requirements/_test.txt +++ b/services/clusters-keeper/requirements/_test.txt @@ -11,6 +11,10 @@ aiosignal==1.3.1 # via # -c requirements/_base.txt # aiohttp +annotated-types==0.7.0 + # via + # -c requirements/_base.txt + # pydantic antlr4-python3-runtime==4.13.2 # via moto anyio==4.3.0 @@ -19,11 +23,6 @@ anyio==4.3.0 # httpx asgi-lifespan==2.1.0 # via -r requirements/_test.in -async-timeout==4.0.3 - # via - # -c requirements/_base.txt - # aiohttp - # redis attrs==23.2.0 # via # -c requirements/_base.txt @@ -82,11 +81,6 @@ docker==7.1.0 # via # -r requirements/_test.in # moto -exceptiongroup==1.2.1 - # via - # -c requirements/_base.txt - # anyio - # pytest faker==27.0.0 # via -r requirements/_test.in fakeredis==2.23.5 @@ -209,11 +203,15 @@ py-partiql-parser==0.5.5 # via moto pycparser==2.22 # via cffi -pydantic==1.10.15 +pydantic==2.9.2 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # aws-sam-translator +pydantic-core==2.23.4 + # via + # -c requirements/_base.txt + # pydantic pyparsing==3.1.2 # via moto pytest==8.3.2 @@ -239,7 +237,9 @@ python-dateutil==2.9.0.post0 # faker # moto python-dotenv==1.0.1 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in pyyaml==6.0.1 # via # -c requirements/../../../requirements/constraints.txt @@ -302,21 +302,16 @@ sortedcontainers==2.4.0 # fakeredis sympy==1.13.2 # via cfn-lint -tomli==2.0.1 - # via - # coverage - # pytest types-pyyaml==6.0.12.20240808 # via -r requirements/_test.in typing-extensions==4.11.0 # via # -c requirements/_base.txt # aiodocker - # anyio # aws-sam-translator # cfn-lint - # fakeredis # pydantic + # pydantic-core urllib3==2.2.1 # via # -c requirements/../../../requirements/constraints.txt diff --git a/services/clusters-keeper/requirements/_tools.txt b/services/clusters-keeper/requirements/_tools.txt index db86636a373..7a2c3f9d91b 100644 --- a/services/clusters-keeper/requirements/_tools.txt +++ b/services/clusters-keeper/requirements/_tools.txt @@ -74,22 +74,12 @@ setuptools==73.0.1 # via # -c requirements/_test.txt # pip-tools -tomli==2.0.1 - # via - # -c requirements/_test.txt - # black - # build - # mypy - # pip-tools - # pylint tomlkit==0.13.2 # via pylint typing-extensions==4.11.0 # via # -c requirements/_base.txt # -c requirements/_test.txt - # astroid - # black # mypy virtualenv==20.26.3 # via pre-commit From f004598724fe765c22414c102e6df24d98d0b656 Mon Sep 17 00:00:00 2001 From: 
Giancarlo Romeo Date: Tue, 1 Oct 2024 16:25:40 +0200 Subject: [PATCH 167/280] upgrade requirements --- services/director-v2/requirements/_base.txt | 64 +++++++++++--------- services/director-v2/requirements/_test.txt | 15 ----- services/director-v2/requirements/_tools.txt | 10 --- 3 files changed, 36 insertions(+), 53 deletions(-) diff --git a/services/director-v2/requirements/_base.txt b/services/director-v2/requirements/_base.txt index 41696797fe8..61392fe3ce4 100644 --- a/services/director-v2/requirements/_base.txt +++ b/services/director-v2/requirements/_base.txt @@ -55,6 +55,8 @@ alembic==1.13.1 # via # -r requirements/../../../packages/postgres-database/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in +annotated-types==0.7.0 + # via pydantic anyio==4.3.0 # via # fast-depends @@ -73,10 +75,8 @@ arrow==1.3.0 # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in async-timeout==4.0.3 # via - # aiohttp # aiopg # asyncpg - # redis asyncpg==0.29.0 # via sqlalchemy attrs==23.2.0 @@ -141,32 +141,15 @@ email-validator==2.1.1 # via # fastapi # pydantic -exceptiongroup==1.2.1 - # via anyio fast-depends==2.4.2 # via faststream -fastapi==0.99.1 +fastapi==0.115.0 # via - # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/dask-task-models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt - # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in # prometheus-fastapi-instrumentator +fastapi-cli==0.0.5 + # via fastapi faststream==0.5.10 # via # -r requirements/../../../packages/service-library/requirements/_base.in @@ -360,7 +343,7 @@ psycopg2-binary==2.9.9 # via # aiopg # sqlalchemy -pydantic==1.10.15 +pydantic==2.9.2 # via # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt @@ -369,7 +352,6 @@ pydantic==1.10.15 # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -399,6 +381,31 @@ pydantic==1.10.15 # -r requirements/_base.in # fast-depends # fastapi + # pydantic-extra-types + # pydantic-settings +pydantic-core==2.23.4 + # via pydantic +pydantic-extra-types==2.9.0 + # via + # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # fastapi +pydantic-settings==2.5.2 + # via + # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/_base.in + # -r 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/_base.in + # fastapi pygments==2.18.0 # via rich pyinstrument==4.6.2 @@ -409,7 +416,7 @@ python-dateutil==2.9.0.post0 # via arrow python-dotenv==1.0.1 # via - # pydantic + # pydantic-settings # uvicorn python-engineio==4.9.1 # via python-socketio @@ -523,7 +530,7 @@ sqlalchemy==1.4.52 # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in # aiopg # alembic -starlette==0.27.0 +starlette==0.38.6 # via # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt @@ -578,6 +585,7 @@ typer==0.12.3 # -r requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/_base.in + # fastapi-cli # faststream types-python-dateutil==2.9.0.20240316 # via arrow @@ -586,13 +594,12 @@ typing-extensions==4.11.0 # aiodebug # aiodocker # alembic - # anyio # fastapi # faststream # pint # pydantic + # pydantic-core # typer - # uvicorn ujson==5.10.0 # via # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -638,6 +645,7 @@ uvicorn==0.29.0 # via # -r requirements/../../../packages/service-library/requirements/_fastapi.in # fastapi + # fastapi-cli uvloop==0.19.0 # via uvicorn watchfiles==0.21.0 diff --git a/services/director-v2/requirements/_test.txt b/services/director-v2/requirements/_test.txt index 4d1d11143d3..4806e20e36a 100644 --- a/services/director-v2/requirements/_test.txt +++ b/services/director-v2/requirements/_test.txt @@ -38,10 +38,6 @@ asgi-lifespan==2.1.0 # via -r requirements/_test.in async-asgi-testclient==1.4.11 # via -r requirements/_test.in -async-timeout==4.0.3 - # via - # -c requirements/_base.txt - # aiohttp attrs==23.2.0 # via # -c requirements/_base.txt @@ -100,11 +96,6 @@ distributed==2024.5.1 # dask docker==7.1.0 # via -r requirements/_test.in -exceptiongroup==1.2.1 - # via - # -c requirements/_base.txt - # anyio - # pytest execnet==2.1.1 # via pytest-xdist faker==27.0.0 @@ -303,11 +294,6 @@ tblib==3.0.0 # via # -c requirements/_base.txt # distributed -tomli==2.0.1 - # via - # coverage - # mypy - # pytest toolz==0.12.1 # via # -c requirements/_base.txt @@ -331,7 +317,6 @@ typing-extensions==4.11.0 # via # -c requirements/_base.txt # alembic - # anyio # mypy # sqlalchemy2-stubs tzdata==2024.1 diff --git a/services/director-v2/requirements/_tools.txt b/services/director-v2/requirements/_tools.txt index ea37071c1ad..261bbf16bef 100644 --- a/services/director-v2/requirements/_tools.txt +++ b/services/director-v2/requirements/_tools.txt @@ -75,22 +75,12 @@ ruff==0.6.1 
# via -r requirements/../../../requirements/devenv.txt setuptools==73.0.1 # via pip-tools -tomli==2.0.1 - # via - # -c requirements/_test.txt - # black - # build - # mypy - # pip-tools - # pylint tomlkit==0.13.2 # via pylint typing-extensions==4.11.0 # via # -c requirements/_base.txt # -c requirements/_test.txt - # astroid - # black # mypy virtualenv==20.26.3 # via pre-commit From 5f03a3590df13a4ae6b664381284e5047df46676 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 1 Oct 2024 16:46:16 +0200 Subject: [PATCH 168/280] run bump-pydantic --- .../core/dynamic_services_settings/sidecar.py | 62 +++++----- .../core/settings.py | 75 ++++++++---- .../models/comp_pipelines.py | 28 +---- .../models/comp_runs.py | 21 ++-- .../models/comp_tasks.py | 110 +++++------------- .../models/dynamic_services_scheduler.py | 68 +++++------ .../models/pricing.py | 9 +- .../docker_service_specs/volume_remover.py | 2 + 8 files changed, 164 insertions(+), 211 deletions(-) diff --git a/services/director-v2/src/simcore_service_director_v2/core/dynamic_services_settings/sidecar.py b/services/director-v2/src/simcore_service_director_v2/core/dynamic_services_settings/sidecar.py index 98ce21fc6a4..6057df2bfd1 100644 --- a/services/director-v2/src/simcore_service_director_v2/core/dynamic_services_settings/sidecar.py +++ b/services/director-v2/src/simcore_service_director_v2/core/dynamic_services_settings/sidecar.py @@ -9,7 +9,7 @@ ensure_unique_dict_values_validator, ensure_unique_list_values_validator, ) -from pydantic import Field, PositiveInt, validator +from pydantic import AliasChoices, Field, PositiveInt, ValidationInfo, field_validator from settings_library.aws_s3_cli import AwsS3CliSettings from settings_library.base import BaseCustomSettings from settings_library.efs import AwsEfsSettings @@ -45,7 +45,7 @@ class RCloneSettings(SettingsLibraryRCloneSettings): description="VFS operation mode, defines how and when the disk cache is synced", ) - @validator("R_CLONE_POLL_INTERVAL_SECONDS") + @field_validator("R_CLONE_POLL_INTERVAL_SECONDS") @classmethod def enforce_r_clone_requirement(cls, v: int, values) -> PositiveInt: dir_cache_time = values["R_CLONE_DIR_CACHE_TIME_SECONDS"] @@ -60,7 +60,7 @@ class PlacementSettings(BaseCustomSettings): # https://docs.docker.com/engine/swarm/services/#control-service-placement. 
DIRECTOR_V2_SERVICES_CUSTOM_CONSTRAINTS: list[DockerPlacementConstraint] = Field( default_factory=list, - example='["node.labels.region==east", "one!=yes"]', + examples=['["node.labels.region==east", "one!=yes"]'], ) DIRECTOR_V2_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS: dict[ @@ -72,20 +72,18 @@ class PlacementSettings(BaseCustomSettings): "see https://github.com/ITISFoundation/osparc-simcore/issues/5250 " "When `None` (default), uses generic resources" ), - example='{"AIRAM": "node.labels.custom==true"}', + examples=['{"AIRAM": "node.labels.custom==true"}'], ) - _unique_custom_constraints = validator( + _unique_custom_constraints = field_validator( "DIRECTOR_V2_SERVICES_CUSTOM_CONSTRAINTS", - allow_reuse=True, )(ensure_unique_list_values_validator) - _unique_resource_placement_constraints_substitutions = validator( + _unique_resource_placement_constraints_substitutions = field_validator( "DIRECTOR_V2_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS", - allow_reuse=True, )(ensure_unique_dict_values_validator) - @validator("DIRECTOR_V2_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS") + @field_validator("DIRECTOR_V2_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS") @classmethod def warn_if_any_values_provided(cls, value: dict) -> dict: if len(value) > 0: @@ -101,40 +99,46 @@ def warn_if_any_values_provided(cls, value: dict) -> dict: class DynamicSidecarSettings(BaseCustomSettings, MixinLoggingSettings): DYNAMIC_SIDECAR_ENDPOINT_SPECS_MODE_DNSRR_ENABLED: bool = Field( # doc: https://docs.docker.com/engine/swarm/networking/#configure-service-discovery default=False, - env=["DYNAMIC_SIDECAR_ENDPOINT_SPECS_MODE_DNSRR_ENABLED"], + validation_alias=AliasChoices( + "DYNAMIC_SIDECAR_ENDPOINT_SPECS_MODE_DNSRR_ENABLED" + ), description="dynamic-sidecar's service 'endpoint_spec' with {'Mode': 'dnsrr'}", ) DYNAMIC_SIDECAR_SC_BOOT_MODE: BootModeEnum = Field( ..., description="Boot mode used for the dynamic-sidecar services" "By defaults, it uses the same boot mode set for the director-v2", - env=["DYNAMIC_SIDECAR_SC_BOOT_MODE", "SC_BOOT_MODE"], + validation_alias=AliasChoices("DYNAMIC_SIDECAR_SC_BOOT_MODE", "SC_BOOT_MODE"), ) DYNAMIC_SIDECAR_LOG_LEVEL: str = Field( "WARNING", description="log level of the dynamic sidecar" "If defined, it captures global env vars LOG_LEVEL and LOGLEVEL from the director-v2 service", - env=["DYNAMIC_SIDECAR_LOG_LEVEL", "LOG_LEVEL", "LOGLEVEL"], + validation_alias=AliasChoices( + "DYNAMIC_SIDECAR_LOG_LEVEL", "LOG_LEVEL", "LOGLEVEL" + ), ) DYNAMIC_SIDECAR_IMAGE: str = Field( ..., - regex=DYNAMIC_SIDECAR_DOCKER_IMAGE_RE, + pattern=DYNAMIC_SIDECAR_DOCKER_IMAGE_RE, description="used by the director to start a specific version of the dynamic-sidecar", ) - DYNAMIC_SIDECAR_R_CLONE_SETTINGS: RCloneSettings = Field(auto_default_from_env=True) + DYNAMIC_SIDECAR_R_CLONE_SETTINGS: RCloneSettings = Field( + json_schema_extra={"auto_default_from_env": True} + ) DYNAMIC_SIDECAR_AWS_S3_CLI_SETTINGS: AwsS3CliSettings | None = Field( - auto_default_from_env=True + json_schema_extra={"auto_default_from_env": True} ) DYNAMIC_SIDECAR_EFS_SETTINGS: AwsEfsSettings | None = Field( - auto_default_from_env=True + json_schema_extra={"auto_default_from_env": True} ) DYNAMIC_SIDECAR_PLACEMENT_SETTINGS: PlacementSettings = Field( - auto_default_from_env=True + json_schema_extra={"auto_default_from_env": True} ) # @@ -144,7 +148,7 @@ class DynamicSidecarSettings(BaseCustomSettings, MixinLoggingSettings): DYNAMIC_SIDECAR_MOUNT_PATH_DEV: Path | None = Field( None, 
description="Host path to the dynamic-sidecar project. Used as source path to mount to the dynamic-sidecar [DEVELOPMENT ONLY]", - example="osparc-simcore/services/dynamic-sidecar", + examples=["osparc-simcore/services/dynamic-sidecar"], ) DYNAMIC_SIDECAR_PORT: PortInt = Field( @@ -157,12 +161,16 @@ class DynamicSidecarSettings(BaseCustomSettings, MixinLoggingSettings): description="Publishes the service on localhost for debuging and testing [DEVELOPMENT ONLY]" "Can be used to access swagger doc from the host as http://127.0.0.1:30023/dev/doc " "where 30023 is the host published port", + validate_default=True, ) - @validator("DYNAMIC_SIDECAR_MOUNT_PATH_DEV", pre=True) + @field_validator("DYNAMIC_SIDECAR_MOUNT_PATH_DEV", mode="before") @classmethod - def auto_disable_if_production(cls, v, values): - if v and values.get("DYNAMIC_SIDECAR_SC_BOOT_MODE") == BootModeEnum.PRODUCTION: + def auto_disable_if_production(cls, v, info: ValidationInfo): + if ( + v + and info.data.get("DYNAMIC_SIDECAR_SC_BOOT_MODE") == BootModeEnum.PRODUCTION + ): _logger.warning( "In production DYNAMIC_SIDECAR_MOUNT_PATH_DEV cannot be set to %s, enforcing None", v, @@ -170,22 +178,24 @@ def auto_disable_if_production(cls, v, values): return None return v - @validator("DYNAMIC_SIDECAR_EXPOSE_PORT", pre=True, always=True) + # TODO[pydantic]: We couldn't refactor the `validator`, please replace it by `field_validator` manually. + # Check https://docs.pydantic.dev/dev-v2/migration/#changes-to-validators for more information. + @field_validator("DYNAMIC_SIDECAR_EXPOSE_PORT", mode="before") @classmethod - def auto_enable_if_development(cls, v, values): + def auto_enable_if_development(cls, v, info: ValidationInfo): if ( - boot_mode := values.get("DYNAMIC_SIDECAR_SC_BOOT_MODE") + boot_mode := info.data.get("DYNAMIC_SIDECAR_SC_BOOT_MODE") ) and boot_mode.is_devel_mode(): # Can be used to access swagger doc from the host as http://127.0.0.1:30023/dev/doc return True return v - @validator("DYNAMIC_SIDECAR_IMAGE", pre=True) + @field_validator("DYNAMIC_SIDECAR_IMAGE", mode="before") @classmethod def strip_leading_slashes(cls, v: str) -> str: return v.lstrip("/") - @validator("DYNAMIC_SIDECAR_LOG_LEVEL") + @field_validator("DYNAMIC_SIDECAR_LOG_LEVEL") @classmethod def _validate_log_level(cls, value) -> str: log_level: str = cls.validate_log_level(value) diff --git a/services/director-v2/src/simcore_service_director_v2/core/settings.py b/services/director-v2/src/simcore_service_director_v2/core/settings.py index d87b567149a..1520358fdb7 100644 --- a/services/director-v2/src/simcore_service_director_v2/core/settings.py +++ b/services/director-v2/src/simcore_service_director_v2/core/settings.py @@ -19,7 +19,14 @@ ClusterTypeInModel, NoAuthentication, ) -from pydantic import AnyHttpUrl, AnyUrl, Field, NonNegativeInt, validator +from pydantic import ( + AliasChoices, + AnyHttpUrl, + AnyUrl, + Field, + NonNegativeInt, + field_validator, +) from settings_library.base import BaseCustomSettings from settings_library.catalog import CatalogSettings from settings_library.docker_registry import RegistrySettings @@ -46,18 +53,20 @@ class DirectorV0Settings(BaseCustomSettings): DIRECTOR_V0_ENABLED: bool = True DIRECTOR_HOST: str = "director" - DIRECTOR_PORT: PortInt = PortInt(8080) + DIRECTOR_PORT: PortInt = 8080 DIRECTOR_V0_VTAG: VersionTag = Field( default="v0", description="Director-v0 service API's version tag" ) @cached_property def endpoint(self) -> str: - url: str = AnyHttpUrl.build( - scheme="http", - host=self.DIRECTOR_HOST, - 
port=f"{self.DIRECTOR_PORT}", - path=f"/{self.DIRECTOR_V0_VTAG}", + url: str = str( + AnyHttpUrl.build( + scheme="http", + host=self.DIRECTOR_HOST, + port=self.DIRECTOR_PORT, + path=f"/{self.DIRECTOR_V0_VTAG}", + ) ) return url @@ -105,7 +114,7 @@ def default_cluster(self) -> Cluster: type=ClusterTypeInModel.ON_PREMISE, ) - @validator("COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH", pre=True) + @field_validator("COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH", mode="before") @classmethod def _empty_auth_is_none(cls, v): if not v: @@ -120,14 +129,14 @@ class AppSettings(BaseCustomSettings, MixinLoggingSettings): LOG_LEVEL: LogLevel = Field( LogLevel.INFO.value, - env=["DIRECTOR_V2_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"], + validation_alias=AliasChoices("DIRECTOR_V2_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"), ) DIRECTOR_V2_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field( default=False, - env=[ + validation_alias=AliasChoices( "DIRECTOR_V2_LOG_FORMAT_LOCAL_DEV_ENABLED", "LOG_FORMAT_LOCAL_DEV_ENABLED", - ], + ), description="Enables local development log format. WARNING: make sure it is disabled if you want to have structured logs!", ) DIRECTOR_V2_DEV_FEATURES_ENABLED: bool = False @@ -159,7 +168,9 @@ class AppSettings(BaseCustomSettings, MixinLoggingSettings): DIRECTOR_V2_REMOTE_DEBUGGING_PORT: PortInt | None # extras - SWARM_STACK_NAME: str = Field("undefined-please-check", env="SWARM_STACK_NAME") + SWARM_STACK_NAME: str = Field( + "undefined-please-check", validation_alias="SWARM_STACK_NAME" + ) SERVICE_TRACKING_HEARTBEAT: datetime.timedelta = Field( default=DEFAULT_RESOURCE_USAGE_HEARTBEAT_INTERVAL, description="Service scheduler heartbeat (everytime a heartbeat is sent into RabbitMQ)" @@ -181,34 +192,48 @@ class AppSettings(BaseCustomSettings, MixinLoggingSettings): ) # debug settings - CLIENT_REQUEST: ClientRequestSettings = Field(auto_default_from_env=True) + CLIENT_REQUEST: ClientRequestSettings = Field( + json_schema_extra={"auto_default_from_env": True} + ) # App modules settings --------------------- - DIRECTOR_V2_STORAGE: StorageSettings = Field(auto_default_from_env=True) + DIRECTOR_V2_STORAGE: StorageSettings = Field( + json_schema_extra={"auto_default_from_env": True} + ) DIRECTOR_V2_NODE_PORTS_STORAGE_AUTH: StorageAuthSettings | None = Field( - auto_default_from_env=True + json_schema_extra={"auto_default_from_env": True} ) - DIRECTOR_V2_CATALOG: CatalogSettings | None = Field(auto_default_from_env=True) + DIRECTOR_V2_CATALOG: CatalogSettings | None = Field( + json_schema_extra={"auto_default_from_env": True} + ) - DIRECTOR_V0: DirectorV0Settings = Field(auto_default_from_env=True) + DIRECTOR_V0: DirectorV0Settings = Field( + json_schema_extra={"auto_default_from_env": True} + ) - DYNAMIC_SERVICES: DynamicServicesSettings = Field(auto_default_from_env=True) + DYNAMIC_SERVICES: DynamicServicesSettings = Field( + json_schema_extra={"auto_default_from_env": True} + ) - POSTGRES: PostgresSettings = Field(auto_default_from_env=True) + POSTGRES: PostgresSettings = Field( + json_schema_extra={"auto_default_from_env": True} + ) - REDIS: RedisSettings = Field(auto_default_from_env=True) + REDIS: RedisSettings = Field(json_schema_extra={"auto_default_from_env": True}) - DIRECTOR_V2_RABBITMQ: RabbitSettings = Field(auto_default_from_env=True) + DIRECTOR_V2_RABBITMQ: RabbitSettings = Field( + json_schema_extra={"auto_default_from_env": True} + ) TRAEFIK_SIMCORE_ZONE: str = Field("internal_simcore_stack") DIRECTOR_V2_COMPUTATIONAL_BACKEND: ComputationalBackendSettings = Field( - auto_default_from_env=True + 
json_schema_extra={"auto_default_from_env": True} ) DIRECTOR_V2_DOCKER_REGISTRY: RegistrySettings = Field( - auto_default_from_env=True, + json_schema_extra={"auto_default_from_env": True}, description="settings for the private registry deployed with the platform", ) DIRECTOR_V2_DOCKER_HUB_REGISTRY: RegistrySettings | None = Field( @@ -216,7 +241,7 @@ class AppSettings(BaseCustomSettings, MixinLoggingSettings): ) DIRECTOR_V2_RESOURCE_USAGE_TRACKER: ResourceUsageTrackerSettings = Field( - auto_default_from_env=True, + json_schema_extra={"auto_default_from_env": True}, description="resource usage tracker service client's plugin", ) @@ -225,7 +250,7 @@ class AppSettings(BaseCustomSettings, MixinLoggingSettings): description="Base URL used to access the public api e.g. http://127.0.0.1:6000 for development or https://api.osparc.io", ) - @validator("LOG_LEVEL", pre=True) + @field_validator("LOG_LEVEL", mode="before") @classmethod def _validate_loglevel(cls, value: str) -> str: log_level: str = cls.validate_log_level(value) diff --git a/services/director-v2/src/simcore_service_director_v2/models/comp_pipelines.py b/services/director-v2/src/simcore_service_director_v2/models/comp_pipelines.py index 6e156607ae6..46862a9d6d1 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/comp_pipelines.py +++ b/services/director-v2/src/simcore_service_director_v2/models/comp_pipelines.py @@ -1,10 +1,10 @@ from contextlib import suppress -from typing import Any, ClassVar, cast +from typing import cast import networkx as nx from models_library.projects import ProjectID from models_library.projects_state import RunningState -from pydantic import BaseModel, validator +from pydantic import BaseModel, ConfigDict, field_validator from simcore_postgres_database.models.comp_pipeline import StateType from ..utils.db import DB_TO_RUNNING_STATE @@ -15,7 +15,7 @@ class CompPipelineAtDB(BaseModel): dag_adjacency_list: dict[str, list[str]] # json serialization issue if using NodeID state: RunningState - @validator("state", pre=True) + @field_validator("state", mode="before") @classmethod def convert_state_from_state_type_enum_if_needed(cls, v): if isinstance(v, str): @@ -27,7 +27,7 @@ def convert_state_from_state_type_enum_if_needed(cls, v): return RunningState(DB_TO_RUNNING_STATE[StateType(v)]) return v - @validator("dag_adjacency_list", pre=True) + @field_validator("dag_adjacency_list", mode="before") @classmethod def auto_convert_dag(cls, v): # this enforcement is here because the serialization using json is not happy with non str Dict keys, also comparison gets funny if the lists are having sometimes UUIDs or str. 
@@ -42,22 +42,4 @@ def get_graph(self) -> nx.DiGraph: ), ) - class Config: - orm_mode = True - - schema_extra: ClassVar[dict[str, Any]] = { - "examples": [ - # DB model - { - "project_id": "65fee9d2-e030-452c-a29c-45d288577ca5", - "dag_adjacency_list": { - "539531c4-afb9-4ca8-bda3-06ad3d7bc339": [ - "f98e20e5-b235-43ed-a63d-15b71bc7c762" - ], - "f98e20e5-b235-43ed-a63d-15b71bc7c762": [], - "5332fcde-b043-41f5-8786-a3a359b110ad": [], - }, - "state": "NOT_STARTED", - } - ] - } + model_config = ConfigDict(from_attributes=True) diff --git a/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py b/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py index 1d7800b9788..7a8d901bdc7 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py +++ b/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py @@ -1,13 +1,13 @@ import datetime from contextlib import suppress -from typing import Any, ClassVar, TypedDict +from typing import TypedDict from models_library.clusters import DEFAULT_CLUSTER_ID, ClusterID from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID from models_library.projects_state import RunningState from models_library.users import UserID -from pydantic import BaseModel, PositiveInt, validator +from pydantic import BaseModel, ConfigDict, PositiveInt, field_validator from simcore_postgres_database.models.comp_pipeline import StateType from ..utils.db import DB_TO_RUNNING_STATE @@ -49,7 +49,7 @@ class CompRunsAtDB(BaseModel): metadata: RunMetadataDict = RunMetadataDict() use_on_demand_clusters: bool - @validator("result", pre=True) + @field_validator("result", mode="before") @classmethod def convert_result_from_state_type_enum_if_needed(cls, v): if isinstance(v, str): @@ -61,30 +61,30 @@ def convert_result_from_state_type_enum_if_needed(cls, v): return RunningState(DB_TO_RUNNING_STATE[StateType(v)]) return v - @validator("cluster_id", pre=True) + @field_validator("cluster_id", mode="before") @classmethod def convert_null_to_default_cluster_id(cls, v): if v is None: v = DEFAULT_CLUSTER_ID return v - @validator("created", "modified", "started", "ended") + @field_validator("created", "modified", "started", "ended") @classmethod def ensure_utc(cls, v: datetime.datetime | None) -> datetime.datetime | None: if v is not None and v.tzinfo is None: v = v.replace(tzinfo=datetime.timezone.utc) return v - @validator("metadata", pre=True) + @field_validator("metadata", mode="before") @classmethod def convert_null_to_empty_metadata(cls, v): if v is None: v = RunMetadataDict() return v - class Config: - orm_mode = True - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + from_attributes=True, + json_schema_extra={ "examples": [ # DB model { @@ -119,4 +119,5 @@ class Config: "use_on_demand_clusters": False, }, ] - } + }, + ) diff --git a/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py b/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py index f1076b7b83b..fa00438a642 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py +++ b/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py @@ -1,6 +1,6 @@ import datetime from contextlib import suppress -from typing import Any, ClassVar +from typing import Any from dask_task_models_library.container_tasks.protocol import ContainerEnvsDict from models_library.api_schemas_directorv2.services import NodeRequirements @@ 
-17,11 +17,11 @@
 from pydantic import (
     BaseModel,
     ByteSize,
-    Extra,
+    ConfigDict,
     Field,
     PositiveInt,
-    parse_obj_as,
-    validator,
+    TypeAdapter,
+    field_validator,
 )
 from simcore_postgres_database.models.comp_pipeline import StateType
 from simcore_postgres_database.models.comp_tasks import NodeClass
@@ -30,8 +30,8 @@


 class Image(BaseModel):
-    name: str = Field(..., regex=SERVICE_KEY_RE.pattern)
-    tag: str = Field(..., regex=SIMPLE_VERSION_RE)
+    name: str = Field(..., pattern=SERVICE_KEY_RE.pattern)
+    tag: str = Field(..., pattern=SIMPLE_VERSION_RE)

     requires_gpu: bool | None = Field(
         default=None, deprecated=True, description="Use instead node_requirements"
@@ -40,7 +40,9 @@ class Image(BaseModel):
         default=None, deprecated=True, description="Use instead node_requirements"
     )
     node_requirements: NodeRequirements | None = Field(
-        default=None, description="the requirements for the service to run on a node"
+        default=None,
+        description="the requirements for the service to run on a node",
+        validate_default=True,
     )
     boot_mode: BootMode = BootMode.CPU
     command: list[str] = Field(
@@ -53,7 +55,7 @@ class Image(BaseModel):
         default_factory=dict, description="The environment to use to run the service"
     )

-    @validator("node_requirements", pre=True, always=True)
+    @field_validator("node_requirements", mode="before")
     @classmethod
     def migrate_from_requirements(cls, v, values):
         if v is None:
@@ -64,13 +66,13 @@ def migrate_from_requirements(cls, v, values):
             v = NodeRequirements(
                 CPU=1.0,
                 GPU=1 if values.get("requires_gpu") else 0,
-                RAM=parse_obj_as(ByteSize, "128 MiB"),
+                RAM=TypeAdapter(ByteSize).validate_python("128 MiB"),
             )
         return v

-    class Config:
-        orm_mode = True
-        schema_extra: ClassVar[dict[str, Any]] = {
+    model_config = ConfigDict(
+        from_attributes=True,
+        json_schema_extra={
             "examples": [
                 {
                     "name": "simcore/services/dynamic/jupyter-octave-python-math",
@@ -91,12 +93,15 @@ class Config:
                     "requires_mpi": False,
                 }
             ]
-        }
+        },
+    )


 # NOTE: for a long time defaultValue field was added to ServiceOutput wrongly in the DB.
 # this flags allows parsing of the outputs without error. This MUST not leave the director-v2!
 class _ServiceOutputOverride(ServiceOutput):
+    # TODO[pydantic]: The `Config` class inherits from another class, please create the `model_config` manually.
+    # Check https://docs.pydantic.dev/dev-v2/migration/#changes-to-config for more information.
     class Config(ServiceOutput.Config):
         extra = Extra.ignore

@@ -107,10 +112,7 @@ class Config(ServiceOutput.Config):
 class NodeSchema(BaseModel):
     inputs: ServiceInputsDict = Field(..., description="the inputs scheam")
     outputs: _ServiceOutputsOverride = Field(..., description="the outputs schema")
-
-    class Config:
-        extra = Extra.forbid
-        orm_mode = True
+    model_config = ConfigDict(extra="ignore", extra="forbid", from_attributes=True)


 class CompTaskAtDB(BaseModel):
@@ -147,10 +149,11 @@ class CompTaskAtDB(BaseModel):
     created: datetime.datetime
     modified: datetime.datetime
     # Additional information about price and hardware (ex.
AWS EC2 instance type) - pricing_info: dict | None + pricing_info: dict | None = None hardware_info: HardwareInfo - @validator("state", pre=True) + @field_validator("state", mode="before") + @classmethod @classmethod def convert_state_from_state_type_enum_if_needed(cls, v): if isinstance(v, str): @@ -162,14 +165,16 @@ def convert_state_from_state_type_enum_if_needed(cls, v): return RunningState(DB_TO_RUNNING_STATE[StateType(v)]) return v - @validator("start", "end", "submit") + @field_validator("start", "end", "submit") + @classmethod @classmethod def ensure_utc(cls, v: datetime.datetime | None) -> datetime.datetime | None: if v is not None and v.tzinfo is None: v = v.replace(tzinfo=datetime.timezone.utc) return v - @validator("hardware_info", pre=True) + @field_validator("hardware_info", mode="before") + @classmethod @classmethod def backward_compatible_null_value(cls, v: HardwareInfo | None) -> HardwareInfo: if v is None: @@ -182,65 +187,4 @@ def to_db_model(self, **exclusion_rules) -> dict[str, Any]: comp_task_dict["state"] = RUNNING_STATE_TO_DB[comp_task_dict["state"]].value return comp_task_dict - class Config: - extra = Extra.forbid - orm_mode = True - schema_extra: ClassVar[dict[str, Any]] = { - "examples": [ - # DB model - { - "task_id": 324, - "project_id": "341351c4-23d1-4366-95d0-bc01386001a7", - "node_id": "7f62be0e-1298-4fe4-be76-66b6e859c260", - "job_id": None, - "internal_id": 3, - "schema": { - "inputs": { - "input_1": { - "label": "input_files", - "description": "Any input files. One or serveral files compressed in a zip will be downloaded in an inputs folder.", - "type": "data:*/*", - "displayOrder": 1.0, - } - }, - "outputs": { - "output_1": { - "label": "Output files", - "description": "Output files uploaded from the outputs folder", - "type": "data:*/*", - "displayOrder": 1.0, - } - }, - }, - "inputs": { - "input_1": { - "nodeUuid": "48a7ac7a-cfc3-44a6-ba9b-5a1a578b922c", - "output": "output_1", - } - }, - "outputs": { - "output_1": { - "store": 0, - "path": "341351c4-23d1-4366-95d0-bc01386001a7/7f62be0e-1298-4fe4-be76-66b6e859c260/output_1.zip", - } - }, - "image": image_example, - "submit": "2021-03-01 13:07:34.19161", - "node_class": "INTERACTIVE", - "state": "NOT_STARTED", - "progress": 0.44, - "last_heartbeat": None, - "created": "2022-05-20 13:28:31.139+00", - "modified": "2023-06-23 15:58:32.833081+00", - "pricing_info": { - "pricing_plan_id": 1, - "pricing_unit_id": 1, - "pricing_unit_cost_id": 1, - }, - "hardware_info": HardwareInfo.model_config["json_schema_extra"][ - "examples" - ][0], - } - for image_example in Image.model_config["json_schema_extra"]["examples"] - ] - } + model_config = ConfigDict(extra="forbid", from_attributes=True) diff --git a/services/director-v2/src/simcore_service_director_v2/models/dynamic_services_scheduler.py b/services/director-v2/src/simcore_service_director_v2/models/dynamic_services_scheduler.py index c5e42263fe4..bd8754815d9 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/dynamic_services_scheduler.py +++ b/services/director-v2/src/simcore_service_director_v2/models/dynamic_services_scheduler.py @@ -1,11 +1,10 @@ import json import logging -import re from collections.abc import Mapping from enum import Enum from functools import cached_property from pathlib import Path -from typing import Any, TypeAlias +from typing import Annotated, Any, TypeAlias from uuid import UUID from models_library.api_schemas_directorv2.dynamic_services import DynamicServiceCreate @@ -28,11 +27,11 @@ from pydantic import ( 
AnyHttpUrl, BaseModel, - ConstrainedStr, - Extra, + ConfigDict, Field, - parse_obj_as, - validator, + StringConstraints, + TypeAdapter, + field_validator, ) from servicelib.error_codes import ErrorCodeStr from servicelib.exception_utils import DelayedExceptionHandler @@ -53,18 +52,14 @@ DockerStatus: TypeAlias = Status2 -class DockerId(ConstrainedStr): - max_length = 25 - regex = re.compile(r"[A-Za-z0-9]{25}") +DockerId = Annotated[str, StringConstraints(max_length=25, pattern=r"[A-Za-z0-9]{25}")] ServiceId: TypeAlias = DockerId NetworkId: TypeAlias = DockerId -class ServiceName(ConstrainedStr): - strip_whitespace = True - min_length = 2 +ServiceName = Annotated[str, StringConstraints(strip_whitespace=True, min_length=2)] logger = logging.getLogger() @@ -139,9 +134,10 @@ def from_container(cls, container: dict[str, Any]) -> "DockerContainerInspect": id=container["Id"], ) - class Config: - keep_untouched = (cached_property,) - allow_mutation = False + model_config = ConfigDict( + ignored_types=(cached_property,), + frozen=True, + ) class ServiceRemovalState(BaseModel): @@ -178,7 +174,7 @@ class DynamicSidecar(BaseModel): is_ready: bool = Field( default=False, - scription=( + description=( "is True while the health check on the dynamic-sidecar is responding. " "Meaning that the dynamic-sidecar is reachable and can accept requests" ), @@ -200,7 +196,7 @@ def compose_spec_submitted(self) -> bool: containers_inspect: list[DockerContainerInspect] = Field( [], - scription="docker inspect results from all the container ran at regular intervals", + description="docker inspect results from all the container ran at regular intervals", ) was_dynamic_sidecar_started: bool = False @@ -288,9 +284,7 @@ def compose_spec_submitted(self) -> bool: "this value will be set to None." 
), ) - - class Config: - validate_assignment = True + model_config = ConfigDict(validate_assignment=True) class DynamicSidecarNamesHelper(BaseModel): @@ -308,25 +302,25 @@ class DynamicSidecarNamesHelper(BaseModel): service_name_dynamic_sidecar: str = Field( ..., - regex=REGEX_DY_SERVICE_SIDECAR, + pattern=REGEX_DY_SERVICE_SIDECAR, max_length=MAX_ALLOWED_SERVICE_NAME_LENGTH, description="unique name of the dynamic-sidecar service", ) proxy_service_name: str = Field( ..., - regex=REGEX_DY_SERVICE_PROXY, + pattern=REGEX_DY_SERVICE_PROXY, max_length=MAX_ALLOWED_SERVICE_NAME_LENGTH, description="name of the proxy for the dynamic-sidecar", ) simcore_traefik_zone: str = Field( ..., - regex=REGEX_DY_SERVICE_SIDECAR, + pattern=REGEX_DY_SERVICE_SIDECAR, description="unique name for the traefik constraints", ) dynamic_sidecar_network_name: str = Field( ..., - regex=REGEX_DY_SERVICE_SIDECAR, + pattern=REGEX_DY_SERVICE_SIDECAR, description="based on the node_id and project_id", ) @@ -363,15 +357,13 @@ class SchedulerData(CommonServiceDetails, DynamicSidecarServiceLabels): hostname: str = Field( ..., description="dy-sidecar's service hostname (provided by docker-swarm)" ) - port: PortInt = Field( - default=parse_obj_as(PortInt, 8000), description="dynamic-sidecar port" - ) + port: PortInt = Field(default=8000, description="dynamic-sidecar port") @property def endpoint(self) -> AnyHttpUrl: """endpoint where all the services are exposed""" - url: AnyHttpUrl = parse_obj_as( - AnyHttpUrl, f"http://{self.hostname}:{self.port}" # NOSONAR + url: AnyHttpUrl = TypeAdapter(AnyHttpUrl).validate_python( + f"http://{self.hostname}:{self.port}" # NOSONAR ) return url @@ -396,7 +388,7 @@ def endpoint(self) -> AnyHttpUrl: ) service_port: PortInt = Field( - default=parse_obj_as(PortInt, TEMPORARY_PORT_NUMBER), + default=TEMPORARY_PORT_NUMBER, description=( "port where the service is exposed defined by the service; " "NOTE: temporary default because it will be changed once the service " @@ -441,8 +433,7 @@ def endpoint(self) -> AnyHttpUrl: def get_proxy_endpoint(self) -> AnyHttpUrl: """get the endpoint where the proxy's admin API is exposed""" assert self.proxy_admin_api_port # nosec - url: AnyHttpUrl = parse_obj_as( - AnyHttpUrl, + url: AnyHttpUrl = TypeAdapter(AnyHttpUrl).validate_python( f"http://{self.proxy_service_name}:{self.proxy_admin_api_port}", # nosec # NOSONAR ) return url @@ -501,7 +492,8 @@ def from_http_request( obj_dict["run_id"] = run_id return cls.model_validate(obj_dict) - @validator("user_preferences_path", pre=True) + @field_validator("user_preferences_path", mode="before") + @classmethod @classmethod def strip_path_serialization_to_none(cls, v): if v == "None": @@ -513,15 +505,13 @@ def from_service_inspect( cls, service_inspect: Mapping[str, Any] ) -> "SchedulerData": labels = service_inspect["Spec"]["Labels"] - return cls.parse_raw(labels[DYNAMIC_SIDECAR_SCHEDULER_DATA_LABEL]) + return cls.model_validate_json(labels[DYNAMIC_SIDECAR_SCHEDULER_DATA_LABEL]) def as_label_data(self) -> str: # compose_spec needs to be json encoded before encoding it to json # and storing it in the label - return self.copy( + return self.model_copy( update={"compose_spec": json.dumps(self.compose_spec)}, deep=True - ).json() + ).model_dump_json() - class Config: - extra = Extra.allow - allow_population_by_field_name = True + model_config = ConfigDict(extra="allow", populate_by_name=True) diff --git a/services/director-v2/src/simcore_service_director_v2/models/pricing.py 
b/services/director-v2/src/simcore_service_director_v2/models/pricing.py index 4aabef7cd10..52a61d8c9e3 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/pricing.py +++ b/services/director-v2/src/simcore_service_director_v2/models/pricing.py @@ -1,12 +1,11 @@ from decimal import Decimal -from typing import Any, ClassVar from models_library.resource_tracker import ( PricingPlanId, PricingUnitCostId, PricingUnitId, ) -from pydantic import BaseModel +from pydantic import BaseModel, ConfigDict class PricingInfo(BaseModel): @@ -14,9 +13,8 @@ class PricingInfo(BaseModel): pricing_unit_id: PricingUnitId pricing_unit_cost_id: PricingUnitCostId pricing_unit_cost: Decimal - - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ { "pricing_plan_id": 1, @@ -26,3 +24,4 @@ class Config: } ] } + ) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/volume_remover.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/volume_remover.py index d1022618f09..eebb6d52871 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/volume_remover.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/volume_remover.py @@ -26,6 +26,8 @@ class DockerVersion(str): """ @classmethod + # TODO[pydantic]: We couldn't refactor `__get_validators__`, please create the `__get_pydantic_core_schema__` manually. + # Check https://docs.pydantic.dev/latest/migration/#defining-custom-types for more information. def __get_validators__(cls): yield cls.validate_docker_version From 713343b76b54fd40034015eef3e976f5c299d2e7 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 1 Oct 2024 20:41:41 +0200 Subject: [PATCH 169/280] upgrade requirements --- services/storage/requirements/_base.txt | 34 +++++++++++++++++++----- services/storage/requirements/_test.txt | 34 +++++++++--------------- services/storage/requirements/_tools.txt | 10 ------- 3 files changed, 40 insertions(+), 38 deletions(-) diff --git a/services/storage/requirements/_base.txt b/services/storage/requirements/_base.txt index a2136baf20a..6c8b6415c6f 100644 --- a/services/storage/requirements/_base.txt +++ b/services/storage/requirements/_base.txt @@ -61,6 +61,8 @@ aiozipkin==1.1.1 # via -r requirements/../../../packages/service-library/requirements/_aiohttp.in alembic==1.13.1 # via -r requirements/../../../packages/postgres-database/requirements/_base.in +annotated-types==0.7.0 + # via pydantic anyio==4.3.0 # via # fast-depends @@ -70,15 +72,14 @@ arrow==1.3.0 # -r requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in async-timeout==4.0.3 # via - # aiohttp # aiopg # asyncpg - # redis asyncpg==0.29.0 # via 
sqlalchemy attrs==23.2.0 @@ -120,8 +121,6 @@ dnspython==2.6.1 # via email-validator email-validator==2.1.1 # via pydantic -exceptiongroup==1.2.1 - # via anyio fast-depends==2.4.2 # via faststream faststream==0.5.10 @@ -254,7 +253,7 @@ psycopg2-binary==2.9.9 # via # aiopg # sqlalchemy -pydantic==1.10.14 +pydantic==2.9.2 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -266,7 +265,6 @@ pydantic==1.10.14 # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt @@ -283,6 +281,26 @@ pydantic==1.10.14 # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # fast-depends + # pydantic-extra-types + # pydantic-settings +pydantic-core==2.23.4 + # via pydantic +pydantic-extra-types==2.9.0 + # via + # -r requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in +pydantic-settings==2.5.2 + # via + # -r requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in pygments==2.17.2 # via rich pyinstrument==4.6.2 @@ -293,6 +311,8 @@ python-dateutil==2.9.0.post0 # via # arrow # botocore +python-dotenv==1.0.1 + # via pydantic-settings pyyaml==6.0.1 # via # -c 
requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -422,9 +442,9 @@ typing-extensions==4.10.0 # aiodebug # aiodocker # alembic - # anyio # faststream # pydantic + # pydantic-core # typer # types-aiobotocore # types-aiobotocore-ec2 diff --git a/services/storage/requirements/_test.txt b/services/storage/requirements/_test.txt index c88441caaad..c6ae8a372bf 100644 --- a/services/storage/requirements/_test.txt +++ b/services/storage/requirements/_test.txt @@ -11,13 +11,12 @@ aiosignal==1.3.1 # via # -c requirements/_base.txt # aiohttp -antlr4-python3-runtime==4.13.2 - # via moto -async-timeout==4.0.3 +annotated-types==0.7.0 # via # -c requirements/_base.txt - # aiohttp - # redis + # pydantic +antlr4-python3-runtime==4.13.2 + # via moto attrs==23.2.0 # via # -c requirements/_base.txt @@ -73,10 +72,6 @@ docker==7.1.0 # via # -r requirements/_test.in # moto -exceptiongroup==1.2.1 - # via - # -c requirements/_base.txt - # pytest faker==27.0.0 # via -r requirements/_test.in fakeredis==2.23.5 @@ -94,10 +89,6 @@ frozenlist==1.4.1 # aiosignal graphql-core==3.2.3 # via moto -greenlet==3.0.3 - # via - # -c requirements/_base.txt - # sqlalchemy icdiff==2.0.7 # via pytest-icdiff idna==3.6 @@ -202,11 +193,15 @@ py-partiql-parser==0.5.5 # via moto pycparser==2.22 # via cffi -pydantic==1.10.14 +pydantic==2.9.2 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # aws-sam-translator +pydantic-core==2.23.4 + # via + # -c requirements/_base.txt + # pydantic pyparsing==3.1.2 # via moto pytest==8.3.2 @@ -247,7 +242,9 @@ python-dateutil==2.9.0.post0 # pandas # simcore-service-storage-sdk python-dotenv==1.0.1 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in pytz==2024.1 # via pandas pyyaml==6.0.1 @@ -317,11 +314,6 @@ sympy==1.13.2 # via cfn-lint termcolor==2.4.0 # via pytest-sugar -tomli==2.0.1 - # via - # coverage - # mypy - # pytest types-aiofiles==24.1.0.20240626 # via -r requirements/_test.in typing-extensions==4.10.0 @@ -329,9 +321,9 @@ typing-extensions==4.10.0 # -c requirements/_base.txt # aws-sam-translator # cfn-lint - # fakeredis # mypy # pydantic + # pydantic-core # sqlalchemy2-stubs tzdata==2024.1 # via pandas diff --git a/services/storage/requirements/_tools.txt b/services/storage/requirements/_tools.txt index b84cbbeb9fb..9ea4ddf842c 100644 --- a/services/storage/requirements/_tools.txt +++ b/services/storage/requirements/_tools.txt @@ -77,22 +77,12 @@ setuptools==73.0.1 # via # -c requirements/_test.txt # pip-tools -tomli==2.0.1 - # via - # -c requirements/_test.txt - # black - # build - # mypy - # pip-tools - # pylint tomlkit==0.13.2 # via pylint typing-extensions==4.10.0 # via # -c requirements/_base.txt # -c requirements/_test.txt - # astroid - # black # mypy virtualenv==20.26.3 # via pre-commit From 12ec0a330f85a0d6cf4d22536ce26dc3cffde4d6 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 2 Oct 2024 09:57:59 +0200 Subject: [PATCH 170/280] upgrade requirements --- packages/models-library/requirements/_base.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/models-library/requirements/_base.txt b/packages/models-library/requirements/_base.txt index 03e1b39d677..f07b0ddd44b 100644 --- a/packages/models-library/requirements/_base.txt +++ b/packages/models-library/requirements/_base.txt @@ -20,13 +20,13 @@ orjson==3.10.7 # via # -c requirements/../../../requirements/constraints.txt # 
-r requirements/_base.in -pydantic==2.9.1 +pydantic==2.9.2 # via # -c requirements/../../../requirements/constraints.txt # -r requirements/_base.in # pydantic-extra-types # pydantic-settings -pydantic-core==2.23.3 +pydantic-core==2.23.4 # via pydantic pydantic-extra-types==2.9.0 # via -r requirements/_base.in From ca36539ede2f01c9d38f57cbf9e076b928e0c7db Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 2 Oct 2024 10:04:52 +0200 Subject: [PATCH 171/280] fix pylint --- .../src/models_library/projects.py | 15 +++++++-------- .../src/models_library/projects_nodes.py | 19 ++++++++++--------- .../utils/json_serialization.py | 7 ++++--- 3 files changed, 21 insertions(+), 20 deletions(-) diff --git a/packages/models-library/src/models_library/projects.py b/packages/models-library/src/models_library/projects.py index 440bfe915bb..9a469145eec 100644 --- a/packages/models-library/src/models_library/projects.py +++ b/packages/models-library/src/models_library/projects.py @@ -113,6 +113,13 @@ def convert_sql_alchemy_enum(cls, v): ) +def _patch_json_schema_extra(schema: dict) -> None: + # Patch to allow jsonschema nullable + # SEE https://github.com/samuelcolvin/pydantic/issues/990#issuecomment-645961530 + state_pydantic_schema = deepcopy(schema["properties"]["state"]) + schema["properties"]["state"] = {"anyOf": [{"type": "null"}, state_pydantic_schema]} + + class Project(BaseProjectModel): # NOTE: This is the pydantic pendant of project-v0.0.1.json used in the API of the webserver/webclient # NOT for usage with DB!! @@ -172,14 +179,6 @@ class Project(BaseProjectModel): alias="workspaceId", ) - def _patch_json_schema_extra(self, schema: dict) -> None: - # Patch to allow jsonschema nullable - # SEE https://github.com/samuelcolvin/pydantic/issues/990#issuecomment-645961530 - state_pydantic_schema = deepcopy(schema["properties"]["state"]) - schema["properties"]["state"] = { - "anyOf": [{"type": "null"}, state_pydantic_schema] - } - model_config = ConfigDict( title="osparc-simcore project", extra="forbid", diff --git a/packages/models-library/src/models_library/projects_nodes.py b/packages/models-library/src/models_library/projects_nodes.py index b074dd4dec6..3ce49cf7491 100644 --- a/packages/models-library/src/models_library/projects_nodes.py +++ b/packages/models-library/src/models_library/projects_nodes.py @@ -113,6 +113,16 @@ class NodeState(BaseModel): ) +def _patch_json_schema_extra(schema: dict) -> None: + # NOTE: exporting without this trick does not make runHash as nullable. + # It is a Pydantic issue see https://github.com/samuelcolvin/pydantic/issues/1270 + for prop_name in ["parent", "runHash"]: + if prop_name in schema.get("properties", {}): + prop = deepcopy(schema["properties"][prop_name]) + prop["nullable"] = True + schema["properties"][prop_name] = prop + + class Node(BaseModel): key: ServiceKey = Field( ..., @@ -234,15 +244,6 @@ def convert_from_enum(cls, v): return NodeState(currentStatus=running_state_value) return v - def _patch_json_schema_extra(self, schema: dict) -> None: - # NOTE: exporting without this trick does not make runHash as nullable. 
- # It is a Pydantic issue see https://github.com/samuelcolvin/pydantic/issues/1270 - for prop_name in ["parent", "runHash"]: - if prop_name in schema.get("properties", {}): - prop = deepcopy(schema["properties"][prop_name]) - prop["nullable"] = True - schema["properties"][prop_name] = prop - model_config = ConfigDict( extra="forbid", json_schema_extra=_patch_json_schema_extra, # type: ignore[typeddict-item] diff --git a/packages/models-library/src/models_library/utils/json_serialization.py b/packages/models-library/src/models_library/utils/json_serialization.py index 69ffb00572d..a2fee1295f1 100644 --- a/packages/models-library/src/models_library/utils/json_serialization.py +++ b/packages/models-library/src/models_library/utils/json_serialization.py @@ -56,8 +56,8 @@ def decimal_encoder(dec_value: Decimal) -> int | float: """ if dec_value.as_tuple().exponent >= 0: # type: ignore[operator] return int(dec_value) - else: - return float(dec_value) + + return float(dec_value) ENCODERS_BY_TYPE: dict[type[Any], Callable[[Any], Any]] = { @@ -95,7 +95,8 @@ def pydantic_encoder(obj: Any) -> Any: if isinstance(obj, BaseModel): return obj.model_dump() - elif is_dataclass(obj): + + if is_dataclass(obj): return asdict(obj) # type: ignore[call-overload] # Check the class type and its superclasses for a matching encoder From 4e49d2dd85bb29146cf4693bf52297ef76df2744 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 2 Oct 2024 10:26:43 +0200 Subject: [PATCH 172/280] fix endopoints type --- packages/settings-library/requirements/dev.txt | 1 + .../settings-library/src/settings_library/ec2.py | 12 ++++++++---- packages/settings-library/src/settings_library/s3.py | 12 ++++++++---- 3 files changed, 17 insertions(+), 8 deletions(-) diff --git a/packages/settings-library/requirements/dev.txt b/packages/settings-library/requirements/dev.txt index 32d383e9ccc..0b760b84076 100644 --- a/packages/settings-library/requirements/dev.txt +++ b/packages/settings-library/requirements/dev.txt @@ -13,6 +13,7 @@ # installs this repo's packages --editable ../pytest-simcore/ +--editable ../models-library # current module --editable . 
diff --git a/packages/settings-library/src/settings_library/ec2.py b/packages/settings-library/src/settings_library/ec2.py index 6a9ab6f65f5..22d2d9af9ee 100644 --- a/packages/settings-library/src/settings_library/ec2.py +++ b/packages/settings-library/src/settings_library/ec2.py @@ -1,14 +1,18 @@ -from pydantic import AnyHttpUrl, Field +from typing import Annotated + +from pydantic import AnyHttpUrl, BeforeValidator, Field, TypeAdapter from pydantic_settings import SettingsConfigDict from .base import BaseCustomSettings +ANY_HTTP_URL_ADAPTER: TypeAdapter = TypeAdapter(AnyHttpUrl) + class EC2Settings(BaseCustomSettings): EC2_ACCESS_KEY_ID: str - EC2_ENDPOINT: AnyHttpUrl | None = Field( - default=None, description="do not define if using standard AWS" - ) + EC2_ENDPOINT: Annotated[ + str, BeforeValidator(lambda x: str(ANY_HTTP_URL_ADAPTER.validate_python(x))) + ] | None = Field(default=None, description="do not define if using standard AWS") EC2_REGION_NAME: str = "us-east-1" EC2_SECRET_ACCESS_KEY: str diff --git a/packages/settings-library/src/settings_library/s3.py b/packages/settings-library/src/settings_library/s3.py index 18f23860658..95268b41920 100644 --- a/packages/settings-library/src/settings_library/s3.py +++ b/packages/settings-library/src/settings_library/s3.py @@ -1,16 +1,20 @@ -from pydantic import AnyHttpUrl, Field +from typing import Annotated + +from pydantic import AnyHttpUrl, BeforeValidator, Field, TypeAdapter from pydantic_settings import SettingsConfigDict from .base import BaseCustomSettings from .basic_types import IDStr +ANY_HTTP_URL_ADAPTER: TypeAdapter = TypeAdapter(AnyHttpUrl) + class S3Settings(BaseCustomSettings): S3_ACCESS_KEY: IDStr S3_BUCKET_NAME: IDStr - S3_ENDPOINT: AnyHttpUrl | None = Field( - default=None, description="do not define if using standard AWS" - ) + S3_ENDPOINT: Annotated[ + str, BeforeValidator(lambda x: str(ANY_HTTP_URL_ADAPTER.validate_python(x))) + ] | None = Field(default=None, description="do not define if using standard AWS") S3_REGION: IDStr S3_SECRET_KEY: IDStr From f0fc1d75f34a5f42e927617f452a8d026a49b1db Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 2 Oct 2024 11:00:05 +0200 Subject: [PATCH 173/280] fix pagination urls type --- .../src/models_library/basic_types.py | 6 +----- .../src/models_library/rest_pagination.py | 16 ++++++++++++++-- .../src/models_library/rest_pagination_utils.py | 14 +++++++------- .../tests/test_rest_pagination_utils.py | 4 ++-- 4 files changed, 24 insertions(+), 16 deletions(-) diff --git a/packages/models-library/src/models_library/basic_types.py b/packages/models-library/src/models_library/basic_types.py index d61eff22617..28e3710adeb 100644 --- a/packages/models-library/src/models_library/basic_types.py +++ b/packages/models-library/src/models_library/basic_types.py @@ -3,8 +3,7 @@ from re import Pattern from typing import Annotated, Final, TypeAlias -import pydantic -from pydantic import Field, PositiveInt, StringConstraints +from pydantic import Field, HttpUrl, PositiveInt, StringConstraints from pydantic_core import core_schema from .basic_regex import ( @@ -130,9 +129,6 @@ class LongTruncatedStr(ConstrainedStr): IdInt: TypeAlias = PositiveInt PrimaryKeyInt: TypeAlias = PositiveInt -AnyHttpUrl = Annotated[str, pydantic.AnyHttpUrl] - -HttpUrl = Annotated[str, pydantic.HttpUrl] # https e.g. 
https://techterms.com/definition/https class HttpSecureUrl(HttpUrl): diff --git a/packages/models-library/src/models_library/rest_pagination.py b/packages/models-library/src/models_library/rest_pagination.py index d8e3b9990b3..ff10ebf4ead 100644 --- a/packages/models-library/src/models_library/rest_pagination.py +++ b/packages/models-library/src/models_library/rest_pagination.py @@ -3,6 +3,7 @@ from pydantic import ( AnyHttpUrl, BaseModel, + BeforeValidator, ConfigDict, Field, NonNegativeInt, @@ -14,13 +15,17 @@ from .utils.common_validators import none_to_empty_list_pre_validator +_ANY_HTTP_URL_ADAPTER: TypeAdapter = TypeAdapter(AnyHttpUrl) + # Default limit values # - Using same values across all pagination entrypoints simplifies # interconnecting paginated calls MAXIMUM_NUMBER_OF_ITEMS_PER_PAGE: Final[int] = 50 -PageLimitInt: TypeAlias = Annotated[int, Field(ge=1, lt=MAXIMUM_NUMBER_OF_ITEMS_PER_PAGE)] +PageLimitInt: TypeAlias = Annotated[ + int, Field(ge=1, lt=MAXIMUM_NUMBER_OF_ITEMS_PER_PAGE) +] DEFAULT_NUMBER_OF_ITEMS_PER_PAGE: Final[PageLimitInt] = TypeAdapter( PageLimitInt @@ -92,7 +97,14 @@ class PageRefs(BaseModel, Generic[RefT]): model_config = ConfigDict(extra="forbid") -class PageLinks(PageRefs[Annotated[str, AnyHttpUrl]]): +class PageLinks( + PageRefs[ + Annotated[ + str, + BeforeValidator(lambda x: str(_ANY_HTTP_URL_ADAPTER.validate_python(x))), + ] + ] +): ... diff --git a/packages/models-library/src/models_library/rest_pagination_utils.py b/packages/models-library/src/models_library/rest_pagination_utils.py index ff56a329f80..660b69ba303 100644 --- a/packages/models-library/src/models_library/rest_pagination_utils.py +++ b/packages/models-library/src/models_library/rest_pagination_utils.py @@ -1,9 +1,8 @@ from math import ceil from typing import Any, Protocol, TypedDict, Union, runtime_checkable -from pydantic import TypeAdapter +from pydantic import AnyHttpUrl, TypeAdapter -from .basic_types import AnyHttpUrl from .rest_pagination import PageLinks, PageMetaInfoLimitOffset # NOTE: In this repo we use two type of URL-like data structures: @@ -30,6 +29,7 @@ def replace_query_params(self, **kwargs: Any) -> "_StarletteURL": _URLType = Union[_YarlURL, _StarletteURL] +_ANY_HTTP_URL_ADAPTER: TypeAdapter = TypeAdapter(AnyHttpUrl) def _replace_query(url: _URLType, query: dict[str, Any]) -> str: @@ -73,21 +73,21 @@ def paginate_data( ), _links=PageLinks( self=( - TypeAdapter(AnyHttpUrl).validate_python( + _ANY_HTTP_URL_ADAPTER.validate_python( _replace_query(request_url, {"offset": offset, "limit": limit}), ) ), - first=TypeAdapter(AnyHttpUrl).validate_python( + first=_ANY_HTTP_URL_ADAPTER.validate_python( _replace_query(request_url, {"offset": 0, "limit": limit}) ), - prev=TypeAdapter(AnyHttpUrl).validate_python( + prev=_ANY_HTTP_URL_ADAPTER.validate_python( _replace_query( request_url, {"offset": max(offset - limit, 0), "limit": limit} ), ) if offset > 0 else None, - next=TypeAdapter(AnyHttpUrl).validate_python( + next=_ANY_HTTP_URL_ADAPTER.validate_python( _replace_query( request_url, {"offset": min(offset + limit, last_page * limit), "limit": limit}, @@ -95,7 +95,7 @@ def paginate_data( ) if offset < (last_page * limit) else None, - last=TypeAdapter(AnyHttpUrl).validate_python( + last=_ANY_HTTP_URL_ADAPTER.validate_python( _replace_query( request_url, {"offset": last_page * limit, "limit": limit} ), diff --git a/packages/models-library/tests/test_rest_pagination_utils.py b/packages/models-library/tests/test_rest_pagination_utils.py index 1fc466f5f8c..acaf6bc9d5c 100644 --- 
a/packages/models-library/tests/test_rest_pagination_utils.py +++ b/packages/models-library/tests/test_rest_pagination_utils.py @@ -75,7 +75,7 @@ def test_paginating_data(base_url): offset += len(data_chunk) assert model_instance.links.next is not None - data_obj: PageDict = paginate_data( + data_obj: PageDict = paginate_data( # type: ignore[no-redef] data_chunk, request_url=URL(model_instance.links.next), total=total_number_of_items, @@ -127,7 +127,7 @@ def test_paginating_data(base_url): assert offset == last_chunk_offset assert model_instance.links.next is not None - data_obj: PageDict = paginate_data( + data_obj: PageDict = paginate_data( # type: ignore[no-redef] data_chunk, request_url=URL(model_instance.links.next), total=total_number_of_items, From aa6a73f8987d9d650261ae15b57422007fb16920 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 2 Oct 2024 11:03:21 +0200 Subject: [PATCH 174/280] fix sonar --- .../src/servicelib/aiohttp/long_running_tasks/_server.py | 6 +++--- packages/settings-library/src/settings_library/tracing.py | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/service-library/src/servicelib/aiohttp/long_running_tasks/_server.py b/packages/service-library/src/servicelib/aiohttp/long_running_tasks/_server.py index d94914fcf91..2d09a65846f 100644 --- a/packages/service-library/src/servicelib/aiohttp/long_running_tasks/_server.py +++ b/packages/service-library/src/servicelib/aiohttp/long_running_tasks/_server.py @@ -68,13 +68,13 @@ async def start_long_running_task( "sockname" ) # https://docs.python.org/3/library/asyncio-protocol.html#asyncio.BaseTransport.get_extra_info status_url = TypeAdapter(AnyHttpUrl).validate_python( - f"http://{ip_addr}:{port}{request_.app.router['get_task_status'].url_for(task_id=task_id)}" + f"http://{ip_addr}:{port}{request_.app.router['get_task_status'].url_for(task_id=task_id)}" # NOSONAR ) result_url = TypeAdapter(AnyHttpUrl).validate_python( - f"http://{ip_addr}:{port}{request_.app.router['get_task_result'].url_for(task_id=task_id)}" + f"http://{ip_addr}:{port}{request_.app.router['get_task_result'].url_for(task_id=task_id)}" # NOSONAR ) abort_url = TypeAdapter(AnyHttpUrl).validate_python( - f"http://{ip_addr}:{port}{request_.app.router['cancel_and_delete_task'].url_for(task_id=task_id)}" + f"http://{ip_addr}:{port}{request_.app.router['cancel_and_delete_task'].url_for(task_id=task_id)}" # NOSONAR ) task_get = TaskGet( task_id=task_id, diff --git a/packages/settings-library/src/settings_library/tracing.py b/packages/settings-library/src/settings_library/tracing.py index e363bf2e19f..85b7abae5ff 100644 --- a/packages/settings-library/src/settings_library/tracing.py +++ b/packages/settings-library/src/settings_library/tracing.py @@ -7,11 +7,11 @@ class TracingSettings(BaseCustomSettings): TRACING_ZIPKIN_ENDPOINT: AnyUrl = Field( - default=TypeAdapter(AnyUrl).validate_python("http://jaeger:9411"), + default=TypeAdapter(AnyUrl).validate_python("http://jaeger:9411"), # NOSONAR description="Zipkin compatible endpoint", ) TRACING_THRIFT_COMPACT_ENDPOINT: AnyUrl = Field( - default=TypeAdapter(AnyUrl).validate_python("http://jaeger:5775"), + default=TypeAdapter(AnyUrl).validate_python("http://jaeger:5775"), # NOSONAR description="accept zipkin.thrift over compact thrift protocol (deprecated, used by legacy clients only)", ) TRACING_CLIENT_NAME: str = Field( From 5debf98573ad056d32c228de46b27074e5f58354 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 2 Oct 2024 11:10:25 +0200 Subject: [PATCH 175/280] 
fix code smells --- packages/settings-library/src/settings_library/postgres.py | 2 +- packages/settings-library/src/settings_library/utils_cli.py | 5 ++--- .../src/simcore_sdk/node_ports_v2/ports_mapping.py | 5 ++--- 3 files changed, 5 insertions(+), 7 deletions(-) diff --git a/packages/settings-library/src/settings_library/postgres.py b/packages/settings-library/src/settings_library/postgres.py index e224b1d71f8..ed7377877be 100644 --- a/packages/settings-library/src/settings_library/postgres.py +++ b/packages/settings-library/src/settings_library/postgres.py @@ -49,7 +49,7 @@ class PostgresSettings(BaseCustomSettings): @field_validator("POSTGRES_MAXSIZE") @classmethod def _check_size(cls, v, info: ValidationInfo): - if not (info.data["POSTGRES_MINSIZE"] <= v): + if info.data["POSTGRES_MINSIZE"] > v: msg = f"assert POSTGRES_MINSIZE={info.data['POSTGRES_MINSIZE']} <= POSTGRES_MAXSIZE={v}" raise ValueError(msg) return v diff --git a/packages/settings-library/src/settings_library/utils_cli.py b/packages/settings-library/src/settings_library/utils_cli.py index 05e0f0ca7bc..4bdaf5c0e9c 100644 --- a/packages/settings-library/src/settings_library/utils_cli.py +++ b/packages/settings-library/src/settings_library/utils_cli.py @@ -60,9 +60,8 @@ def print_as_envfile( elif show_secrets and hasattr(value, "get_secret_value"): value = value.get_secret_value() - if verbose: - if field.description: - typer.echo(f"# {field.description}") + if verbose and field.description: + typer.echo(f"# {field.description}") typer.echo(f"{name}={value}") diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/ports_mapping.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/ports_mapping.py index 611684df7cf..9fb13510afb 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/ports_mapping.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/ports_mapping.py @@ -9,9 +9,8 @@ class BasePortsMapping(RootModel[dict[ServicePortKey, Port]]): def __getitem__(self, key: int | ServicePortKey) -> Port: - if isinstance(key, int): - if key < len(self.root): - key = list(self.root.keys())[key] + if isinstance(key, int) and key < len(self.root): + key = list(self.root.keys())[key] if key not in self.root: raise UnboundPortError(key) assert isinstance(key, str) # nosec From 9d93d7e358642dd53ce7b61a72bb607ef0ccef3d Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 2 Oct 2024 13:41:50 +0200 Subject: [PATCH 176/280] continue upgrading --- .../src/aws_library/ec2/_client.py | 2 +- .../aws-library/src/aws_library/s3/_client.py | 16 ++-- .../src/aws_library/ssm/_client.py | 6 +- packages/aws-library/tests/test_ec2_models.py | 4 +- packages/aws-library/tests/test_s3_client.py | 58 +++++++------- packages/aws-library/tests/test_s3_utils.py | 46 ++++++----- .../container_tasks/events.py | 14 ++-- .../src/models_library/api_schemas_storage.py | 8 +- .../src/models_library/basic_regex.py | 10 +-- .../src/models_library/projects_nodes_io.py | 3 +- .../utils/pydantic_fields_extension.py | 3 +- .../tests/test_utils_pydantic_extension.py | 76 +++++++++++++++++++ .../tests/test_utils_serialization.py | 34 +++++++++ .../src/settings_library/rabbit.py | 3 +- .../src/settings_library/ssm.py | 12 ++- 15 files changed, 203 insertions(+), 92 deletions(-) create mode 100644 packages/models-library/tests/test_utils_pydantic_extension.py create mode 100644 packages/models-library/tests/test_utils_serialization.py diff --git a/packages/aws-library/src/aws_library/ec2/_client.py 
b/packages/aws-library/src/aws_library/ec2/_client.py index 12c2a27fff0..14094939dde 100644 --- a/packages/aws-library/src/aws_library/ec2/_client.py +++ b/packages/aws-library/src/aws_library/ec2/_client.py @@ -41,7 +41,7 @@ async def create(cls, settings: EC2Settings) -> "SimcoreEC2API": session = aioboto3.Session() session_client = session.client( "ec2", - endpoint_url=str(settings.EC2_ENDPOINT), + endpoint_url=settings.EC2_ENDPOINT, aws_access_key_id=settings.EC2_ACCESS_KEY_ID, aws_secret_access_key=settings.EC2_SECRET_ACCESS_KEY, region_name=settings.EC2_REGION_NAME, diff --git a/packages/aws-library/src/aws_library/s3/_client.py b/packages/aws-library/src/aws_library/s3/_client.py index 5d1b357dead..b2ae33cc9b4 100644 --- a/packages/aws-library/src/aws_library/s3/_client.py +++ b/packages/aws-library/src/aws_library/s3/_client.py @@ -43,6 +43,9 @@ _MAX_CONCURRENT_COPY: Final[int] = 4 _AWS_MAX_ITEMS_PER_PAGE: Final[int] = 1000 +_ANY_URL_ADAPTER: Final[TypeAdapter[AnyUrl]] = TypeAdapter(AnyUrl) +_LIST_ANY_URL_ADAPTER: Final[TypeAdapter[list[AnyUrl]]] = TypeAdapter(list[AnyUrl]) + class UploadedBytesTransferredCallback(Protocol): def __call__(self, bytes_transferred: int, *, file_name: str) -> None: @@ -70,7 +73,7 @@ async def create( session = aioboto3.Session() session_client = session.client( "s3", - endpoint_url=str(settings.S3_ENDPOINT), + endpoint_url=settings.S3_ENDPOINT, aws_access_key_id=settings.S3_ACCESS_KEY, aws_secret_access_key=settings.S3_SECRET_KEY, region_name=settings.S3_REGION, @@ -260,8 +263,7 @@ async def create_single_presigned_download_link( Params={"Bucket": bucket, "Key": object_key}, ExpiresIn=expiration_secs, ) - url: AnyUrl = TypeAdapter(AnyUrl).validate_python(generated_link) - return url + return _ANY_URL_ADAPTER.validate_python(generated_link) @s3_exception_handler(_logger) async def create_single_presigned_upload_link( @@ -274,8 +276,7 @@ async def create_single_presigned_upload_link( Params={"Bucket": bucket, "Key": object_key}, ExpiresIn=expiration_secs, ) - url: AnyUrl = TypeAdapter(AnyUrl).validate_python(generated_link) - return url + return _ANY_URL_ADAPTER.validate_python(generated_link) @s3_exception_handler(_logger) async def create_multipart_upload_links( @@ -298,7 +299,7 @@ async def create_multipart_upload_links( # compute the number of links, based on the announced file size num_upload_links, chunk_size = compute_num_file_chunks(file_size) # now create the links - upload_links = TypeAdapter(list[AnyUrl]).validate_python( + upload_links = _LIST_ANY_URL_ADAPTER.validate_python( await asyncio.gather( *( self._client.generate_presigned_url( @@ -472,7 +473,6 @@ def is_multipart(file_size: ByteSize) -> bool: @staticmethod def compute_s3_url(*, bucket: S3BucketName, object_key: S3ObjectKey) -> AnyUrl: - url: AnyUrl = TypeAdapter(AnyUrl).validate_python( + return _ANY_URL_ADAPTER.validate_python( f"s3://{bucket}/{urllib.parse.quote(object_key)}" ) - return url diff --git a/packages/aws-library/src/aws_library/ssm/_client.py b/packages/aws-library/src/aws_library/ssm/_client.py index a46bf14bf93..2b51a93b82a 100644 --- a/packages/aws-library/src/aws_library/ssm/_client.py +++ b/packages/aws-library/src/aws_library/ssm/_client.py @@ -49,14 +49,16 @@ async def create(cls, settings: SSMSettings) -> "SimcoreSSMAPI": session = aioboto3.Session() session_client = session.client( "ssm", - endpoint_url=str(settings.SSM_ENDPOINT), + endpoint_url=settings.SSM_ENDPOINT, aws_access_key_id=settings.SSM_ACCESS_KEY_ID.get_secret_value(), 
aws_secret_access_key=settings.SSM_SECRET_ACCESS_KEY.get_secret_value(), region_name=settings.SSM_REGION_NAME, ) assert isinstance(session_client, ClientCreatorContext) # nosec exit_stack = contextlib.AsyncExitStack() - ec2_client = cast(SSMClient, await exit_stack.enter_async_context(session_client)) + ec2_client = cast( + SSMClient, await exit_stack.enter_async_context(session_client) + ) return cls(ec2_client, session, exit_stack) async def close(self) -> None: diff --git a/packages/aws-library/tests/test_ec2_models.py b/packages/aws-library/tests/test_ec2_models.py index 5d39b4cb746..ed232ad0043 100644 --- a/packages/aws-library/tests/test_ec2_models.py +++ b/packages/aws-library/tests/test_ec2_models.py @@ -110,12 +110,12 @@ def test_resources_create_as_empty(): ( Resources(cpus=0, ram=ByteSize(34)), Resources(cpus=1, ram=ByteSize(0)), - Resources.construct(cpus=-1, ram=ByteSize(34)), + Resources.model_construct(cpus=-1, ram=ByteSize(34)), ), ( Resources(cpus=0.1, ram=ByteSize(34)), Resources(cpus=1, ram=ByteSize(1)), - Resources.construct(cpus=-0.9, ram=ByteSize(33)), + Resources.model_construct(cpus=-0.9, ram=ByteSize(33)), ), ], ) diff --git a/packages/aws-library/tests/test_s3_client.py b/packages/aws-library/tests/test_s3_client.py index a7ef20d698c..e6313c50e9b 100644 --- a/packages/aws-library/tests/test_s3_client.py +++ b/packages/aws-library/tests/test_s3_client.py @@ -14,7 +14,7 @@ from collections.abc import AsyncIterator, Awaitable, Callable from dataclasses import dataclass from pathlib import Path -from typing import Any +from typing import Any, Final import botocore.exceptions import pytest @@ -49,6 +49,8 @@ from types_aiobotocore_s3 import S3Client from types_aiobotocore_s3.literals import BucketLocationConstraintType +_BYTE_SIZE_ADAPTER: Final[TypeAdapter[ByteSize]] = TypeAdapter(ByteSize) + @pytest.fixture async def simcore_s3_api( @@ -111,7 +113,7 @@ async def _( file, MultiPartUploadLinks( upload_id="fake", - chunk_size=TypeAdapter(ByteSize).validate_python(file.stat().st_size), + chunk_size=_BYTE_SIZE_ADAPTER.validate_python(file.stat().st_size), urls=[presigned_url], ), ) @@ -135,7 +137,7 @@ async def with_uploaded_file_on_s3( s3_client: S3Client, with_s3_bucket: S3BucketName, ) -> AsyncIterator[UploadedFile]: - test_file = create_file_of_size(TypeAdapter(ByteSize).validate_python("10Kib")) + test_file = create_file_of_size(_BYTE_SIZE_ADAPTER.validate_python("10Kib")) await s3_client.upload_file( Filename=f"{test_file}", Bucket=with_s3_bucket, @@ -590,7 +592,7 @@ async def test_undelete_file( assert file_metadata.size == with_uploaded_file_on_s3.local_path.stat().st_size # upload another file on top of the existing one - new_file = create_file_of_size(TypeAdapter(ByteSize).validate_python("5Kib")) + new_file = create_file_of_size(_BYTE_SIZE_ADAPTER.validate_python("5Kib")) await s3_client.upload_file( Filename=f"{new_file}", Bucket=with_s3_bucket, @@ -745,7 +747,7 @@ async def test_create_single_presigned_upload_link( [Path, AnyUrl, S3BucketName, S3ObjectKey], Awaitable[None] ], ): - file = create_file_of_size(TypeAdapter(ByteSize).validate_python("1Mib")) + file = create_file_of_size(_BYTE_SIZE_ADAPTER.validate_python("1Mib")) s3_object_key = file.name presigned_url = await simcore_s3_api.create_single_presigned_upload_link( bucket=with_s3_bucket, @@ -773,7 +775,7 @@ async def test_create_single_presigned_upload_link_with_non_existing_bucket_rais create_file_of_size: Callable[[ByteSize], Path], default_expiration_time_seconds: int, ): - file = 
create_file_of_size(TypeAdapter(ByteSize).validate_python("1Mib")) + file = create_file_of_size(_BYTE_SIZE_ADAPTER.validate_python("1Mib")) s3_object_key = file.name with pytest.raises(S3BucketInvalidError): await simcore_s3_api.create_single_presigned_upload_link( @@ -1080,7 +1082,7 @@ async def test_copy_file_invalid_raises( create_file_of_size: Callable[[ByteSize], Path], faker: Faker, ): - file = create_file_of_size(TypeAdapter(ByteSize).validate_python("1MiB")) + file = create_file_of_size(_BYTE_SIZE_ADAPTER.validate_python("1MiB")) uploaded_file = await upload_file(file) dst_object_key = faker.file_name() # NOTE: since aioboto3 13.1.0 this raises S3KeyNotFoundError instead of S3BucketInvalidError @@ -1105,9 +1107,9 @@ async def test_copy_file_invalid_raises( "directory_size, min_file_size, max_file_size", [ ( - TypeAdapter(ByteSize).validate_python("1Mib"), - TypeAdapter(ByteSize).validate_python("1B"), - TypeAdapter(ByteSize).validate_python("10Kib"), + _BYTE_SIZE_ADAPTER.validate_python("1Mib"), + _BYTE_SIZE_ADAPTER.validate_python("1B"), + _BYTE_SIZE_ADAPTER.validate_python("10Kib"), ) ], ids=byte_size_ids, @@ -1131,9 +1133,9 @@ async def test_get_directory_metadata( "directory_size, min_file_size, max_file_size", [ ( - TypeAdapter(ByteSize).validate_python("1Mib"), - TypeAdapter(ByteSize).validate_python("1B"), - TypeAdapter(ByteSize).validate_python("10Kib"), + _BYTE_SIZE_ADAPTER.validate_python("1Mib"), + _BYTE_SIZE_ADAPTER.validate_python("1B"), + _BYTE_SIZE_ADAPTER.validate_python("10Kib"), ) ], ids=byte_size_ids, @@ -1163,9 +1165,9 @@ async def test_get_directory_metadata_raises( "directory_size, min_file_size, max_file_size", [ ( - TypeAdapter(ByteSize).validate_python("1Mib"), - TypeAdapter(ByteSize).validate_python("1B"), - TypeAdapter(ByteSize).validate_python("10Kib"), + _BYTE_SIZE_ADAPTER.validate_python("1Mib"), + _BYTE_SIZE_ADAPTER.validate_python("1B"), + _BYTE_SIZE_ADAPTER.validate_python("10Kib"), ) ], ids=byte_size_ids, @@ -1199,9 +1201,9 @@ async def test_delete_file_recursively( "directory_size, min_file_size, max_file_size", [ ( - TypeAdapter(ByteSize).validate_python("1Mib"), - TypeAdapter(ByteSize).validate_python("1B"), - TypeAdapter(ByteSize).validate_python("10Kib"), + _BYTE_SIZE_ADAPTER.validate_python("1Mib"), + _BYTE_SIZE_ADAPTER.validate_python("1B"), + _BYTE_SIZE_ADAPTER.validate_python("10Kib"), ) ], ids=byte_size_ids, @@ -1237,9 +1239,9 @@ async def test_delete_file_recursively_raises( "directory_size, min_file_size, max_file_size", [ ( - TypeAdapter(ByteSize).validate_python("1Mib"), - TypeAdapter(ByteSize).validate_python("1B"), - TypeAdapter(ByteSize).validate_python("10Kib"), + _BYTE_SIZE_ADAPTER.validate_python("1Mib"), + _BYTE_SIZE_ADAPTER.validate_python("1B"), + _BYTE_SIZE_ADAPTER.validate_python("10Kib"), ) ], ids=byte_size_ids, @@ -1337,14 +1339,14 @@ def run_async_test(*args, **kwargs) -> None: "directory_size, min_file_size, max_file_size", [ ( - TypeAdapter(ByteSize).validate_python("1Mib"), - TypeAdapter(ByteSize).validate_python("1B"), - TypeAdapter(ByteSize).validate_python("10Kib"), + _BYTE_SIZE_ADAPTER.validate_python("1Mib"), + _BYTE_SIZE_ADAPTER.validate_python("1B"), + _BYTE_SIZE_ADAPTER.validate_python("10Kib"), ), ( - TypeAdapter(ByteSize).validate_python("500Mib"), - TypeAdapter(ByteSize).validate_python("10Mib"), - TypeAdapter(ByteSize).validate_python("50Mib"), + _BYTE_SIZE_ADAPTER.validate_python("500Mib"), + _BYTE_SIZE_ADAPTER.validate_python("10Mib"), + _BYTE_SIZE_ADAPTER.validate_python("50Mib"), ), ], 
ids=byte_size_ids, diff --git a/packages/aws-library/tests/test_s3_utils.py b/packages/aws-library/tests/test_s3_utils.py index cfba1634943..bf021978585 100644 --- a/packages/aws-library/tests/test_s3_utils.py +++ b/packages/aws-library/tests/test_s3_utils.py @@ -4,6 +4,8 @@ # pylint: disable=unused-variable +from typing import Final + import pytest from aws_library.s3._utils import ( _MULTIPART_MAX_NUMBER_OF_PARTS, @@ -13,59 +15,61 @@ from pydantic import ByteSize, TypeAdapter from pytest_simcore.helpers.parametrizations import byte_size_ids +_BYTE_SIZE_ADAPTER: Final[TypeAdapter[ByteSize]] = TypeAdapter(ByteSize) + @pytest.mark.parametrize( "file_size, expected_num_chunks, expected_chunk_size", [ ( - TypeAdapter(ByteSize).validate_python("5Mib"), + _BYTE_SIZE_ADAPTER.validate_python("5Mib"), 1, - TypeAdapter(ByteSize).validate_python("10Mib"), + _BYTE_SIZE_ADAPTER.validate_python("10Mib"), ), ( - TypeAdapter(ByteSize).validate_python("10Mib"), + _BYTE_SIZE_ADAPTER.validate_python("10Mib"), 1, - TypeAdapter(ByteSize).validate_python("10Mib"), + _BYTE_SIZE_ADAPTER.validate_python("10Mib"), ), ( - TypeAdapter(ByteSize).validate_python("20Mib"), + _BYTE_SIZE_ADAPTER.validate_python("20Mib"), 2, - TypeAdapter(ByteSize).validate_python("10Mib"), + _BYTE_SIZE_ADAPTER.validate_python("10Mib"), ), ( - TypeAdapter(ByteSize).validate_python("50Mib"), + _BYTE_SIZE_ADAPTER.validate_python("50Mib"), 5, - TypeAdapter(ByteSize).validate_python("10Mib"), + _BYTE_SIZE_ADAPTER.validate_python("10Mib"), ), ( - TypeAdapter(ByteSize).validate_python("150Mib"), + _BYTE_SIZE_ADAPTER.validate_python("150Mib"), 15, - TypeAdapter(ByteSize).validate_python("10Mib"), + _BYTE_SIZE_ADAPTER.validate_python("10Mib"), ), ( - TypeAdapter(ByteSize).validate_python("550Mib"), + _BYTE_SIZE_ADAPTER.validate_python("550Mib"), 55, - TypeAdapter(ByteSize).validate_python("10Mib"), + _BYTE_SIZE_ADAPTER.validate_python("10Mib"), ), ( - TypeAdapter(ByteSize).validate_python("560Gib"), + _BYTE_SIZE_ADAPTER.validate_python("560Gib"), 5735, - TypeAdapter(ByteSize).validate_python("100Mib"), + _BYTE_SIZE_ADAPTER.validate_python("100Mib"), ), ( - TypeAdapter(ByteSize).validate_python("5Tib"), + _BYTE_SIZE_ADAPTER.validate_python("5Tib"), 8739, - TypeAdapter(ByteSize).validate_python("600Mib"), + _BYTE_SIZE_ADAPTER.validate_python("600Mib"), ), ( - TypeAdapter(ByteSize).validate_python("15Tib"), + _BYTE_SIZE_ADAPTER.validate_python("15Tib"), 7680, - TypeAdapter(ByteSize).validate_python("2Gib"), + _BYTE_SIZE_ADAPTER.validate_python("2Gib"), ), ( - TypeAdapter(ByteSize).validate_python("9431773844"), + _BYTE_SIZE_ADAPTER.validate_python("9431773844"), 900, - TypeAdapter(ByteSize).validate_python("10Mib"), + _BYTE_SIZE_ADAPTER.validate_python("10Mib"), ), ], ids=byte_size_ids, @@ -79,7 +83,7 @@ def test_compute_num_file_chunks( def test_enormous_file_size_raises_value_error(): - enormous_file_size = TypeAdapter(ByteSize).validate_python( + enormous_file_size = _BYTE_SIZE_ADAPTER.validate_python( ( max(_MULTIPART_UPLOADS_TARGET_MAX_PART_SIZE) * _MULTIPART_MAX_NUMBER_OF_PARTS diff --git a/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/events.py b/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/events.py index 50fdb3144a3..a27bb027e94 100644 --- a/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/events.py +++ b/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/events.py @@ -50,13 +50,6 @@ def from_dask_worker( 
task_owner=task_owner, ) - @field_validator("progress") - @classmethod - def ensure_between_0_1(cls, v): - if 0 <= v <= 1: - return v - return min(max(0, v), 1) - model_config = ConfigDict( json_schema_extra={ "examples": [ @@ -86,6 +79,13 @@ def ensure_between_0_1(cls, v): } ) + @field_validator("progress") + @classmethod + def ensure_between_0_1(cls, v): + if 0 <= v <= 1: + return v + return min(max(0, v), 1) + LogMessageStr: TypeAlias = str LogLevelInt: TypeAlias = int diff --git a/packages/models-library/src/models_library/api_schemas_storage.py b/packages/models-library/src/models_library/api_schemas_storage.py index 7a18334dae4..bd0185a9173 100644 --- a/packages/models-library/src/models_library/api_schemas_storage.py +++ b/packages/models-library/src/models_library/api_schemas_storage.py @@ -8,12 +8,10 @@ from datetime import datetime from enum import Enum -from functools import partial from typing import Annotated, Any, TypeAlias from uuid import UUID from pydantic import ( - AfterValidator, BaseModel, ByteSize, ConfigDict, @@ -26,7 +24,7 @@ ) from pydantic.networks import AnyUrl -from .basic_regex import DATCORE_DATASET_NAME_RE, S3_BUCKET_NAME_RE, validate_re +from .basic_regex import DATCORE_DATASET_NAME_RE, S3_BUCKET_NAME_RE from .basic_types import SHA256Str from .generics import ListModel from .projects_nodes_io import ( @@ -39,9 +37,7 @@ ETag: TypeAlias = str -S3BucketName: TypeAlias = Annotated[ - str, AfterValidator(partial(validate_re, S3_BUCKET_NAME_RE)) -] +S3BucketName: TypeAlias = Annotated[str, StringConstraints(pattern=S3_BUCKET_NAME_RE)] DatCoreDatasetName: TypeAlias = Annotated[ str, StringConstraints(pattern=DATCORE_DATASET_NAME_RE) diff --git a/packages/models-library/src/models_library/basic_regex.py b/packages/models-library/src/models_library/basic_regex.py index 269d4b1987a..009215d884a 100644 --- a/packages/models-library/src/models_library/basic_regex.py +++ b/packages/models-library/src/models_library/basic_regex.py @@ -49,7 +49,9 @@ SIMCORE_S3_DIRECTORY_ID_RE = rf"^({UUID_RE_BASE})\/({UUID_RE_BASE})\/(.+)\/$" # S3 - AWS bucket names [https://docs.aws.amazon.com/AmazonS3/latest/userguide/bucketnamingrules.html] -S3_BUCKET_NAME_RE = r"(?!(^xn--|-s3alias$))^[a-z0-9][a-z0-9-]{1,61}[a-z0-9]$" +S3_BUCKET_NAME_RE = re.compile( + r"(?!(^xn--|-s3alias$))^[a-z0-9][a-z0-9-]{1,61}[a-z0-9]$" +) # Datcore file ID DATCORE_FILE_ID_RE = rf"^N:package:{UUID_RE_BASE}$" @@ -77,9 +79,3 @@ ) PROPERTY_KEY_RE = r"^[-_a-zA-Z0-9]+$" # TODO: PC->* it would be advisable to have this "variable friendly" (see VARIABLE_NAME_RE) - - -def validate_re(pattern: str, value: str): - if not re.compile(pattern).match(value): - raise ValueError(f"The {value} doesn't match the {pattern=}.") - return value diff --git a/packages/models-library/src/models_library/projects_nodes_io.py b/packages/models-library/src/models_library/projects_nodes_io.py index b75d7ec4170..8710e0f21db 100644 --- a/packages/models-library/src/models_library/projects_nodes_io.py +++ b/packages/models-library/src/models_library/projects_nodes_io.py @@ -6,7 +6,6 @@ - Link to another port: PortLink """ -import re from pathlib import Path from typing import Annotated, TypeAlias from uuid import UUID @@ -52,7 +51,7 @@ class SimcoreS3DirectoryID(ConstrainedStr): `{project_id}/{node_id}/simcore-dir-name/` """ - pattern: re.Pattern[str] | None = re.compile(SIMCORE_S3_DIRECTORY_ID_RE) + pattern: str = SIMCORE_S3_DIRECTORY_ID_RE @staticmethod def _get_parent(s3_object: str, *, parent_index: int) -> str: diff --git 
a/packages/models-library/src/models_library/utils/pydantic_fields_extension.py b/packages/models-library/src/models_library/utils/pydantic_fields_extension.py index e9701c9efeb..f2f6d59a5f4 100644 --- a/packages/models-library/src/models_library/utils/pydantic_fields_extension.py +++ b/packages/models-library/src/models_library/utils/pydantic_fields_extension.py @@ -12,8 +12,7 @@ def get_type(info: FieldInfo) -> Any: def is_literal(info: FieldInfo) -> bool: - origin = get_origin(info.annotation) - return origin is Literal + return get_origin(info.annotation) is Literal def is_nullable(info: FieldInfo) -> bool: diff --git a/packages/models-library/tests/test_utils_pydantic_extension.py b/packages/models-library/tests/test_utils_pydantic_extension.py new file mode 100644 index 00000000000..390874fc995 --- /dev/null +++ b/packages/models-library/tests/test_utils_pydantic_extension.py @@ -0,0 +1,76 @@ +from typing import Literal + +import pytest +from models_library.utils.pydantic_fields_extension import ( + get_type, + is_literal, + is_nullable, +) +from pydantic import BaseModel, Field + + +class MyModel(BaseModel): + a: int + b: float | None = Field(...) + c: str = "bla" + d: bool | None = None + e: Literal["bla"] + + +@pytest.mark.parametrize( + "fn,expected,name", + [ + ( + get_type, + int, + "a", + ), + ( + get_type, + float, + "b", + ), + ( + get_type, + str, + "c", + ), + (get_type, bool, "d"), + ( + is_literal, + False, + "a", + ), + ( + is_literal, + False, + "b", + ), + ( + is_literal, + False, + "c", + ), + (is_literal, False, "d"), + (is_literal, True, "e"), + ( + is_nullable, + False, + "a", + ), + ( + is_nullable, + True, + "b", + ), + ( + is_nullable, + False, + "c", + ), + (is_nullable, True, "d"), + (is_nullable, False, "e"), + ], +) +def test_field_fn(fn, expected, name): + assert expected == fn(MyModel.model_fields[name]) diff --git a/packages/models-library/tests/test_utils_serialization.py b/packages/models-library/tests/test_utils_serialization.py new file mode 100644 index 00000000000..dbf1142021d --- /dev/null +++ b/packages/models-library/tests/test_utils_serialization.py @@ -0,0 +1,34 @@ +import pytest +from models_library.utils.serialization import model_dump_with_secrets +from pydantic import BaseModel, SecretStr + + +class Credentials(BaseModel): + USERNAME: str | None = None + PASSWORD: SecretStr | None = None + + +@pytest.fixture() +def my_credentials() -> Credentials: + return Credentials(USERNAME="DeepThought", PASSWORD=SecretStr("42")) + + +@pytest.mark.parametrize( + "expected,show_secrets", + [ + ( + {"USERNAME": "DeepThought", "PASSWORD": "42"}, + True, + ), + ( + {"USERNAME": "DeepThought", "PASSWORD": "**********"}, + False, # hide secrets + ), + ], +) +def test_model_dump_with_secrets( + my_credentials: Credentials, expected: dict, show_secrets: bool +): + assert expected == model_dump_with_secrets( + my_credentials, show_secrets=show_secrets + ) diff --git a/packages/settings-library/src/settings_library/rabbit.py b/packages/settings-library/src/settings_library/rabbit.py index 2b339755e78..e2cc2e271ce 100644 --- a/packages/settings-library/src/settings_library/rabbit.py +++ b/packages/settings-library/src/settings_library/rabbit.py @@ -1,6 +1,5 @@ from functools import cached_property -from pydantic import TypeAdapter from pydantic.networks import AnyUrl from pydantic.types import SecretStr @@ -15,7 +14,7 @@ class RabbitDsn(AnyUrl): class RabbitSettings(BaseCustomSettings): # host RABBIT_HOST: str - RABBIT_PORT: PortInt = 
TypeAdapter(PortInt).validate_python(5672) + RABBIT_PORT: PortInt = 5672 RABBIT_SECURE: bool # auth diff --git a/packages/settings-library/src/settings_library/ssm.py b/packages/settings-library/src/settings_library/ssm.py index bb009792e67..6bda70f2269 100644 --- a/packages/settings-library/src/settings_library/ssm.py +++ b/packages/settings-library/src/settings_library/ssm.py @@ -1,14 +1,18 @@ -from pydantic import AnyHttpUrl, Field, SecretStr +from typing import Annotated, Final + +from pydantic import AnyHttpUrl, BeforeValidator, Field, SecretStr, TypeAdapter from pydantic_settings import SettingsConfigDict from .base import BaseCustomSettings +_ANY_HTTP_URL_ADAPTER: Final[TypeAdapter] = TypeAdapter(AnyHttpUrl) + class SSMSettings(BaseCustomSettings): SSM_ACCESS_KEY_ID: SecretStr - SSM_ENDPOINT: AnyHttpUrl | None = Field( - default=None, description="do not define if using standard AWS" - ) + SSM_ENDPOINT: Annotated[ + str, BeforeValidator(lambda x: str(_ANY_HTTP_URL_ADAPTER.validate_python(x))) + ] | None = Field(default=None, description="do not define if using standard AWS") SSM_REGION_NAME: str = "us-east-1" SSM_SECRET_ACCESS_KEY: SecretStr From 0a1a1a454dd04eff5d03e678b2d1a484161c6e73 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 2 Oct 2024 14:17:44 +0200 Subject: [PATCH 177/280] continue upgrading --- .../src/models_library/projects_nodes_io.py | 11 +++++---- .../models_library/service_settings_labels.py | 9 +++----- .../service_settings_nat_rule.py | 11 ++------- .../tests/test_osparc_variable_identifier.py | 23 +++++++++++-------- .../tests/test_utils_serialization.py | 14 ++++------- .../utils_folders.py | 2 +- .../aiohttp/long_running_tasks/_server.py | 12 ++++++---- .../fastapi/long_running_tasks/_client.py | 5 ++-- 8 files changed, 42 insertions(+), 45 deletions(-) diff --git a/packages/models-library/src/models_library/projects_nodes_io.py b/packages/models-library/src/models_library/projects_nodes_io.py index 8710e0f21db..6b59d356e8a 100644 --- a/packages/models-library/src/models_library/projects_nodes_io.py +++ b/packages/models-library/src/models_library/projects_nodes_io.py @@ -12,9 +12,9 @@ from models_library.basic_types import ConstrainedStr, KeyIDStr from pydantic import ( - AfterValidator, AnyUrl, BaseModel, + BeforeValidator, ConfigDict, Field, StringConstraints, @@ -45,6 +45,9 @@ ] +_ANY_URL_ADAPTER: TypeAdapter[AnyUrl] = TypeAdapter(AnyUrl) + + class SimcoreS3DirectoryID(ConstrainedStr): """ A simcore directory has the following structure: @@ -122,9 +125,9 @@ class PortLink(BaseModel): class DownloadLink(BaseModel): """I/O port type to hold a generic download link to a file (e.g. 
S3 pre-signed link, etc)""" - download_link: Annotated[AnyUrl, AfterValidator(str)] = Field( - ..., alias="downloadLink" - ) + download_link: Annotated[ + str, BeforeValidator(lambda x: str(_ANY_URL_ADAPTER.validate_python(x))) + ] = Field(..., alias="downloadLink") label: str | None = Field(default=None, description="Display name") model_config = ConfigDict( extra="forbid", diff --git a/packages/models-library/src/models_library/service_settings_labels.py b/packages/models-library/src/models_library/service_settings_labels.py index abc308dcfb4..851b1880cc3 100644 --- a/packages/models-library/src/models_library/service_settings_labels.py +++ b/packages/models-library/src/models_library/service_settings_labels.py @@ -43,8 +43,7 @@ class ContainerSpec(BaseModel): max_length=2, ) - model_config = ConfigDict( - **_BaseConfig, + model_config = _BaseConfig | ConfigDict( json_schema_extra={ "examples": [ {"Command": ["executable"]}, @@ -102,8 +101,7 @@ def ensure_backwards_compatible_setting_type(cls, v): return "Resources" return v - model_config = ConfigDict( - **_BaseConfig, + model_config = _BaseConfig | ConfigDict( populate_by_name=True, json_schema_extra={ "examples": [ @@ -221,8 +219,7 @@ def validate_volume_limits(cls, v, info: ValidationInfo) -> str | None: output: str | None = v return output - model_config = ConfigDict( - **_BaseConfig, + model_config = _BaseConfig | ConfigDict( json_schema_extra={ "examples": [ { diff --git a/packages/models-library/src/models_library/service_settings_nat_rule.py b/packages/models-library/src/models_library/service_settings_nat_rule.py index 9864fbfae88..3e193397821 100644 --- a/packages/models-library/src/models_library/service_settings_nat_rule.py +++ b/packages/models-library/src/models_library/service_settings_nat_rule.py @@ -1,21 +1,14 @@ from collections.abc import Generator from typing import Final -from pydantic import ( - BaseModel, - ConfigDict, - Field, - TypeAdapter, - ValidationInfo, - field_validator, -) +from pydantic import BaseModel, ConfigDict, Field, ValidationInfo, field_validator from .basic_types import PortInt from .osparc_variable_identifier import OsparcVariableIdentifier, raise_if_unresolved # Cloudflare DNS server address DEFAULT_DNS_SERVER_ADDRESS: Final[str] = "1.1.1.1" # NOSONAR -DEFAULT_DNS_SERVER_PORT: Final[PortInt] = TypeAdapter(PortInt).validate_python(53) +DEFAULT_DNS_SERVER_PORT: Final[PortInt] = 53 class _PortRange(BaseModel): diff --git a/packages/models-library/tests/test_osparc_variable_identifier.py b/packages/models-library/tests/test_osparc_variable_identifier.py index af32dd7c42c..cb23b19f60a 100644 --- a/packages/models-library/tests/test_osparc_variable_identifier.py +++ b/packages/models-library/tests/test_osparc_variable_identifier.py @@ -41,6 +41,11 @@ ] +_OSPARC_VARIABLE_IDENTIFIER_ADAPTER: TypeAdapter[ + OsparcVariableIdentifier +] = TypeAdapter(OsparcVariableIdentifier) + + @pytest.fixture(params=VALID_IDENTIFIERS) def osparc_variable_identifier_str(request: pytest.FixtureRequest) -> str: return request.param @@ -50,7 +55,7 @@ def osparc_variable_identifier_str(request: pytest.FixtureRequest) -> str: def identifier( osparc_variable_identifier_str: str, ) -> OsparcVariableIdentifier: - return TypeAdapter(OsparcVariableIdentifier).validate_python( + return _OSPARC_VARIABLE_IDENTIFIER_ADAPTER.validate_python( osparc_variable_identifier_str ) @@ -58,7 +63,7 @@ def identifier( @pytest.mark.parametrize("invalid_var_name", INVALID_IDENTIFIERS) def 
test_osparc_variable_identifier_does_not_validate(invalid_var_name: str): with pytest.raises(ValidationError): - TypeAdapter(OsparcVariableIdentifier).validate_python(invalid_var_name) + _OSPARC_VARIABLE_IDENTIFIER_ADAPTER.validate_python(invalid_var_name) def test_raise_if_unresolved(identifier: OsparcVariableIdentifier): @@ -78,17 +83,17 @@ class Example(BaseModel): @pytest.mark.parametrize( "object_template", [ - TypeAdapter(OsparcVariableIdentifier).validate_python("$OSPARC_VARIABLE_1"), - [TypeAdapter(OsparcVariableIdentifier).validate_python("$OSPARC_VARIABLE_1")], - (TypeAdapter(OsparcVariableIdentifier).validate_python("$OSPARC_VARIABLE_1"),), - {TypeAdapter(OsparcVariableIdentifier).validate_python("$OSPARC_VARIABLE_1")}, + _OSPARC_VARIABLE_IDENTIFIER_ADAPTER.validate_python("$OSPARC_VARIABLE_1"), + [_OSPARC_VARIABLE_IDENTIFIER_ADAPTER.validate_python("$OSPARC_VARIABLE_1")], + (_OSPARC_VARIABLE_IDENTIFIER_ADAPTER.validate_python("$OSPARC_VARIABLE_1"),), + {_OSPARC_VARIABLE_IDENTIFIER_ADAPTER.validate_python("$OSPARC_VARIABLE_1")}, { - "test": TypeAdapter(OsparcVariableIdentifier).validate_python( + "test": _OSPARC_VARIABLE_IDENTIFIER_ADAPTER.validate_python( "$OSPARC_VARIABLE_1" ) }, Example( - nested_objects=TypeAdapter(OsparcVariableIdentifier).validate_python( + nested_objects=_OSPARC_VARIABLE_IDENTIFIER_ADAPTER.validate_python( "$OSPARC_VARIABLE_1" ) ), @@ -155,7 +160,7 @@ def test_osparc_variable_name_and_default_value( expected_osparc_variable_name: str, expected_default_value: str | None, ): - osparc_variable_identifer = TypeAdapter(OsparcVariableIdentifier).validate_python( + osparc_variable_identifer = _OSPARC_VARIABLE_IDENTIFIER_ADAPTER.validate_python( str_identifier ) assert osparc_variable_identifer.name == expected_osparc_variable_name diff --git a/packages/models-library/tests/test_utils_serialization.py b/packages/models-library/tests/test_utils_serialization.py index dbf1142021d..3a55de6026b 100644 --- a/packages/models-library/tests/test_utils_serialization.py +++ b/packages/models-library/tests/test_utils_serialization.py @@ -1,3 +1,5 @@ +from typing import Final + import pytest from models_library.utils.serialization import model_dump_with_secrets from pydantic import BaseModel, SecretStr @@ -8,9 +10,7 @@ class Credentials(BaseModel): PASSWORD: SecretStr | None = None -@pytest.fixture() -def my_credentials() -> Credentials: - return Credentials(USERNAME="DeepThought", PASSWORD=SecretStr("42")) +ME: Final[Credentials] = Credentials(USERNAME="DeepThought", PASSWORD=SecretStr("42")) @pytest.mark.parametrize( @@ -26,9 +26,5 @@ def my_credentials() -> Credentials: ), ], ) -def test_model_dump_with_secrets( - my_credentials: Credentials, expected: dict, show_secrets: bool -): - assert expected == model_dump_with_secrets( - my_credentials, show_secrets=show_secrets - ) +def test_model_dump_with_secrets(expected: dict, show_secrets: bool): + assert expected == model_dump_with_secrets(ME, show_secrets=show_secrets) diff --git a/packages/postgres-database/src/simcore_postgres_database/utils_folders.py b/packages/postgres-database/src/simcore_postgres_database/utils_folders.py index 16aed6ea035..1abb93a1f5b 100644 --- a/packages/postgres-database/src/simcore_postgres_database/utils_folders.py +++ b/packages/postgres-database/src/simcore_postgres_database/utils_folders.py @@ -310,7 +310,7 @@ def _get_filter_for_enabled_permissions( class FolderEntry(BaseModel): id: _FolderID - parent_folder: _FolderID | None = Field(None, alias="traversal_parent_id") + parent_folder: 
_FolderID | None = Field(alias="traversal_parent_id") name: str description: str owner: _GroupID = Field(alias="created_by") diff --git a/packages/service-library/src/servicelib/aiohttp/long_running_tasks/_server.py b/packages/service-library/src/servicelib/aiohttp/long_running_tasks/_server.py index 2d09a65846f..8e20e5950dd 100644 --- a/packages/service-library/src/servicelib/aiohttp/long_running_tasks/_server.py +++ b/packages/service-library/src/servicelib/aiohttp/long_running_tasks/_server.py @@ -28,6 +28,8 @@ _logger = logging.getLogger(__name__) +_ANY_HTTP_URL_ADAPTER: TypeAdapter[AnyHttpUrl] = TypeAdapter(AnyHttpUrl) + def no_ops_decorator(handler: Handler): return handler @@ -67,13 +69,13 @@ async def start_long_running_task( ip_addr, port = request_.transport.get_extra_info( "sockname" ) # https://docs.python.org/3/library/asyncio-protocol.html#asyncio.BaseTransport.get_extra_info - status_url = TypeAdapter(AnyHttpUrl).validate_python( - f"http://{ip_addr}:{port}{request_.app.router['get_task_status'].url_for(task_id=task_id)}" # NOSONAR + status_url = _ANY_HTTP_URL_ADAPTER.validate_python( + f"http://{ip_addr}:{port}{request_.app.router['get_task_status'].url_for(task_id=task_id)}" # NOSONAR ) - result_url = TypeAdapter(AnyHttpUrl).validate_python( - f"http://{ip_addr}:{port}{request_.app.router['get_task_result'].url_for(task_id=task_id)}" # NOSONAR + result_url = _ANY_HTTP_URL_ADAPTER.validate_python( + f"http://{ip_addr}:{port}{request_.app.router['get_task_result'].url_for(task_id=task_id)}" # NOSONAR ) - abort_url = TypeAdapter(AnyHttpUrl).validate_python( + abort_url = _ANY_HTTP_URL_ADAPTER.validate_python( f"http://{ip_addr}:{port}{request_.app.router['cancel_and_delete_task'].url_for(task_id=task_id)}" # NOSONAR ) task_get = TaskGet( diff --git a/packages/service-library/src/servicelib/fastapi/long_running_tasks/_client.py b/packages/service-library/src/servicelib/fastapi/long_running_tasks/_client.py index 38465d6487b..f2ad04c9669 100644 --- a/packages/service-library/src/servicelib/fastapi/long_running_tasks/_client.py +++ b/packages/service-library/src/servicelib/fastapi/long_running_tasks/_client.py @@ -23,6 +23,8 @@ DEFAULT_HTTP_REQUESTS_TIMEOUT: Final[PositiveFloat] = 15 +_ANY_HTTP_URL_ADAPTER: TypeAdapter[AnyHttpUrl] = TypeAdapter(AnyHttpUrl) + logger = logging.getLogger(__name__) @@ -129,10 +131,9 @@ def _client_configuration(self) -> ClientConfiguration: return output def _get_url(self, path: str) -> AnyHttpUrl: - output: AnyHttpUrl = TypeAdapter(AnyHttpUrl).validate_python( + return _ANY_HTTP_URL_ADAPTER.validate_python( f"{self._base_url}{self._client_configuration.router_prefix}{path}", ) - return output @retry_on_http_errors async def get_task_status( From 32622e0d41a359f2d1ec7c6018b51903251f2fd1 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 2 Oct 2024 14:21:25 +0200 Subject: [PATCH 178/280] fix mypy --- packages/models-library/src/models_library/projects.py | 2 +- packages/models-library/src/models_library/projects_nodes.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/models-library/src/models_library/projects.py b/packages/models-library/src/models_library/projects.py index 9a469145eec..43f75113b7b 100644 --- a/packages/models-library/src/models_library/projects.py +++ b/packages/models-library/src/models_library/projects.py @@ -182,5 +182,5 @@ class Project(BaseProjectModel): model_config = ConfigDict( title="osparc-simcore project", extra="forbid", - json_schema_extra=_patch_json_schema_extra, # type: 
ignore[typeddict-item] + json_schema_extra=_patch_json_schema_extra, ) diff --git a/packages/models-library/src/models_library/projects_nodes.py b/packages/models-library/src/models_library/projects_nodes.py index 3ce49cf7491..3a6ea052313 100644 --- a/packages/models-library/src/models_library/projects_nodes.py +++ b/packages/models-library/src/models_library/projects_nodes.py @@ -246,5 +246,5 @@ def convert_from_enum(cls, v): model_config = ConfigDict( extra="forbid", - json_schema_extra=_patch_json_schema_extra, # type: ignore[typeddict-item] + json_schema_extra=_patch_json_schema_extra, ) From 9a6df8014e171ebc380203ad227fe52a8508c6af Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 2 Oct 2024 14:29:21 +0200 Subject: [PATCH 179/280] move adapter out --- .../rabbitmq/rpc_interfaces/dynamic_scheduler/services.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_scheduler/services.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_scheduler/services.py index 9bf8e262611..3dcc9ed502f 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_scheduler/services.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_scheduler/services.py @@ -26,6 +26,8 @@ DEFAULT_LEGACY_WB_TO_DV2_HTTP_REQUESTS_TIMEOUT_S * 2 ) +_RPC_METHOD_NAME_ADAPTER: TypeAdapter[RPCMethodName] = TypeAdapter(RPCMethodName) + @log_decorator(_logger, level=logging.DEBUG) async def get_service_status( @@ -33,7 +35,7 @@ async def get_service_status( ) -> NodeGetIdle | DynamicServiceGet | NodeGet: result = await rabbitmq_rpc_client.request( DYNAMIC_SCHEDULER_RPC_NAMESPACE, - TypeAdapter(RPCMethodName).validate_python("get_service_status"), + _RPC_METHOD_NAME_ADAPTER.validate_python("get_service_status"), node_id=node_id, timeout_s=_RPC_DEFAULT_TIMEOUT_S, ) @@ -49,7 +51,7 @@ async def run_dynamic_service( ) -> DynamicServiceGet | NodeGet: result = await rabbitmq_rpc_client.request( DYNAMIC_SCHEDULER_RPC_NAMESPACE, - TypeAdapter(RPCMethodName).validate_python("run_dynamic_service"), + _RPC_METHOD_NAME_ADAPTER.validate_python("run_dynamic_service"), dynamic_service_start=dynamic_service_start, timeout_s=_RPC_DEFAULT_TIMEOUT_S, ) @@ -66,7 +68,7 @@ async def stop_dynamic_service( ) -> None: result = await rabbitmq_rpc_client.request( DYNAMIC_SCHEDULER_RPC_NAMESPACE, - TypeAdapter(RPCMethodName).validate_python("stop_dynamic_service"), + _RPC_METHOD_NAME_ADAPTER.validate_python("stop_dynamic_service"), dynamic_service_stop=dynamic_service_stop, timeout_s=timeout_s, ) From 8e53f7a0751fa357ca994f5a9fb70833b0c2e6eb Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 2 Oct 2024 14:44:45 +0200 Subject: [PATCH 180/280] move adapter out --- .../resource_usage_tracker/pricing_plans.py | 14 ++++++++------ .../resource_usage_tracker/pricing_units.py | 8 +++++--- .../resource_usage_tracker/service_runs.py | 8 +++++--- .../simcore_sdk/node_ports_common/r_clone_utils.py | 2 ++ 4 files changed, 20 insertions(+), 12 deletions(-) diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/pricing_plans.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/pricing_plans.py index a2c9259aa0b..218cd139fb4 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/pricing_plans.py +++ 
b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/pricing_plans.py @@ -26,6 +26,8 @@ _DEFAULT_TIMEOUT_S: Final[NonNegativeInt] = 20 +_RPC_METHOD_NAME_ADAPTER: TypeAdapter[RPCMethodName] = TypeAdapter(RPCMethodName) + @log_decorator(_logger, level=logging.DEBUG) async def get_pricing_plan( @@ -36,7 +38,7 @@ async def get_pricing_plan( ) -> PricingPlanGet: result: PricingPlanGet = await rabbitmq_rpc_client.request( RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, - TypeAdapter(RPCMethodName).validate_python("get_pricing_plan"), + _RPC_METHOD_NAME_ADAPTER.validate_python("get_pricing_plan"), product_name=product_name, pricing_plan_id=pricing_plan_id, timeout_s=_DEFAULT_TIMEOUT_S, @@ -53,7 +55,7 @@ async def list_pricing_plans( ) -> list[PricingPlanGet]: result: PricingPlanGet = await rabbitmq_rpc_client.request( RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, - TypeAdapter(RPCMethodName).validate_python("list_pricing_plans"), + _RPC_METHOD_NAME_ADAPTER.validate_python("list_pricing_plans"), product_name=product_name, timeout_s=_DEFAULT_TIMEOUT_S, ) @@ -69,7 +71,7 @@ async def create_pricing_plan( ) -> PricingPlanGet: result: PricingPlanGet = await rabbitmq_rpc_client.request( RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, - TypeAdapter(RPCMethodName).validate_python("create_pricing_plan"), + _RPC_METHOD_NAME_ADAPTER.validate_python("create_pricing_plan"), data=data, timeout_s=_DEFAULT_TIMEOUT_S, ) @@ -86,7 +88,7 @@ async def update_pricing_plan( ) -> PricingPlanGet: result: PricingPlanGet = await rabbitmq_rpc_client.request( RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, - TypeAdapter(RPCMethodName).validate_python("update_pricing_plan"), + _RPC_METHOD_NAME_ADAPTER.validate_python("update_pricing_plan"), product_name=product_name, data=data, timeout_s=_DEFAULT_TIMEOUT_S, @@ -104,7 +106,7 @@ async def list_connected_services_to_pricing_plan_by_pricing_plan( ) -> list[PricingPlanToServiceGet]: result: PricingPlanGet = await rabbitmq_rpc_client.request( RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, - TypeAdapter(RPCMethodName).validate_python( + _RPC_METHOD_NAME_ADAPTER.validate_python( "list_connected_services_to_pricing_plan_by_pricing_plan" ), product_name=product_name, @@ -126,7 +128,7 @@ async def connect_service_to_pricing_plan( ) -> PricingPlanToServiceGet: result: PricingPlanGet = await rabbitmq_rpc_client.request( RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, - TypeAdapter(RPCMethodName).validate_python("connect_service_to_pricing_plan"), + _RPC_METHOD_NAME_ADAPTER.validate_python("connect_service_to_pricing_plan"), product_name=product_name, pricing_plan_id=pricing_plan_id, service_key=service_key, diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/pricing_units.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/pricing_units.py index 9851c55bc11..afa5611a92d 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/pricing_units.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/pricing_units.py @@ -25,6 +25,8 @@ _DEFAULT_TIMEOUT_S: Final[NonNegativeInt] = 20 +_RPC_METHOD_NAME_ADAPTER: TypeAdapter[RPCMethodName] = TypeAdapter(RPCMethodName) + @log_decorator(_logger, level=logging.DEBUG) async def get_pricing_unit( @@ -36,7 +38,7 @@ async def get_pricing_unit( ) -> PricingUnitGet: result: PricingUnitGet = await rabbitmq_rpc_client.request( RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, - 
TypeAdapter(RPCMethodName).validate_python("get_pricing_unit"), + _RPC_METHOD_NAME_ADAPTER.validate_python("get_pricing_unit"), product_name=product_name, pricing_plan_id=pricing_plan_id, pricing_unit_id=pricing_unit_id, @@ -55,7 +57,7 @@ async def create_pricing_unit( ) -> PricingUnitGet: result: PricingUnitGet = await rabbitmq_rpc_client.request( RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, - TypeAdapter(RPCMethodName).validate_python("create_pricing_unit"), + _RPC_METHOD_NAME_ADAPTER.validate_python("create_pricing_unit"), product_name=product_name, data=data, timeout_s=_DEFAULT_TIMEOUT_S, @@ -73,7 +75,7 @@ async def update_pricing_unit( ) -> PricingUnitGet: result: PricingUnitGet = await rabbitmq_rpc_client.request( RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, - TypeAdapter(RPCMethodName).validate_python("update_pricing_unit"), + _RPC_METHOD_NAME_ADAPTER.validate_python("update_pricing_unit"), product_name=product_name, data=data, timeout_s=_DEFAULT_TIMEOUT_S, diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/service_runs.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/service_runs.py index efc04b2dba6..ad7b2fd908b 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/service_runs.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/service_runs.py @@ -28,6 +28,8 @@ _DEFAULT_TIMEOUT_S: Final[NonNegativeInt] = 20 +_RPC_METHOD_NAME_ADAPTER: TypeAdapter[RPCMethodName] = TypeAdapter(RPCMethodName) + @log_decorator(_logger, level=logging.DEBUG) async def get_service_run_page( @@ -44,7 +46,7 @@ async def get_service_run_page( ) -> ServiceRunPage: result = await rabbitmq_rpc_client.request( RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, - TypeAdapter(RPCMethodName).validate_python("get_service_run_page"), + _RPC_METHOD_NAME_ADAPTER.validate_python("get_service_run_page"), user_id=user_id, product_name=product_name, limit=limit, @@ -74,7 +76,7 @@ async def get_osparc_credits_aggregated_usages_page( ) -> OsparcCreditsAggregatedUsagesPage: result = await rabbitmq_rpc_client.request( RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, - TypeAdapter(RPCMethodName).validate_python( + _RPC_METHOD_NAME_ADAPTER.validate_python( "get_osparc_credits_aggregated_usages_page" ), user_id=user_id, @@ -104,7 +106,7 @@ async def export_service_runs( ) -> AnyUrl: result: AnyUrl = await rabbitmq_rpc_client.request( RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, - TypeAdapter(RPCMethodName).validate_python("export_service_runs"), + _RPC_METHOD_NAME_ADAPTER.validate_python("export_service_runs"), user_id=user_id, product_name=product_name, wallet_id=wallet_id, diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone_utils.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone_utils.py index a88dfaf203d..75ed54ec686 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone_utils.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone_utils.py @@ -12,6 +12,8 @@ _logger = logging.getLogger(__name__) + + class _RCloneSyncMessageBase(BaseModel): level: str = Field(..., description="log level") msg: str From 9f1c18c9122a60090fa764b6d24d9dd886a7608d Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 2 Oct 2024 14:48:59 +0200 Subject: [PATCH 181/280] simplify regex --- packages/models-library/src/models_library/basic_regex.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/packages/models-library/src/models_library/basic_regex.py b/packages/models-library/src/models_library/basic_regex.py index 009215d884a..b65c0fd1fe1 100644 --- a/packages/models-library/src/models_library/basic_regex.py +++ b/packages/models-library/src/models_library/basic_regex.py @@ -50,7 +50,7 @@ # S3 - AWS bucket names [https://docs.aws.amazon.com/AmazonS3/latest/userguide/bucketnamingrules.html] S3_BUCKET_NAME_RE = re.compile( - r"(?!(^xn--|-s3alias$))^[a-z0-9][a-z0-9-]{1,61}[a-z0-9]$" + r"^(?!xn--)[a-z0-9][a-z0-9-]{1,61}[a-z0-9]$(? Date: Wed, 2 Oct 2024 15:18:19 +0200 Subject: [PATCH 182/280] fix httpx client --- .../helpers/httpx_client_base_dev.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/httpx_client_base_dev.py b/packages/pytest-simcore/src/pytest_simcore/helpers/httpx_client_base_dev.py index d9b5bb64437..9a36d4cc020 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/httpx_client_base_dev.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/httpx_client_base_dev.py @@ -6,7 +6,7 @@ from fastapi.encoders import jsonable_encoder from httpx._types import URLTypes from jsonschema import ValidationError -from pydantic import parse_file_as +from pydantic import TypeAdapter from .httpx_calls_capture_errors import CaptureProcessingError from .httpx_calls_capture_models import HttpApiCallCaptureModel, get_captured_model @@ -14,6 +14,11 @@ _logger = logging.getLogger(__name__) +_HTTP_API_CALL_CAPTURE_MODEL_ADAPTER: TypeAdapter[ + list[HttpApiCallCaptureModel] +] = TypeAdapter(list[HttpApiCallCaptureModel]) + + class AsyncClientCaptureWrapper(httpx.AsyncClient): """ Adds captures mechanism @@ -41,8 +46,11 @@ async def request(self, method: str, url: URLTypes, **kwargs): or self._capture_file.read_text().strip() == "" ): self._capture_file.write_text("[]") - serialized_captures: list[HttpApiCallCaptureModel] = parse_file_as( - list[HttpApiCallCaptureModel], self._capture_file + + serialized_captures: list[ + HttpApiCallCaptureModel + ] = _HTTP_API_CALL_CAPTURE_MODEL_ADAPTER.validate_json( + self._capture_file.read_text() ) serialized_captures.append(capture) self._capture_file.write_text( From 7ffe2476e1a3b2ea38fdace77317fde8c1441959 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 2 Oct 2024 15:53:25 +0200 Subject: [PATCH 183/280] upgrade --- .../api/routes/solvers.py | 2 -- .../core/application.py | 2 +- .../models/api_resources.py | 13 +++++----- .../models/basic_types.py | 15 +++++------- .../models/pagination.py | 2 +- .../models/schemas/files.py | 15 ++++++------ .../services/catalog.py | 24 +++++++++++++++---- .../services/director_v2.py | 10 ++++---- .../tests/unit/_with_db/conftest.py | 2 +- .../test_api_routers_solvers_jobs_delete.py | 17 +++++++------ .../test_api_routers_solvers_jobs_metadata.py | 6 ++--- .../test_api_routers_solvers_jobs_read.py | 11 ++++----- .../test_api_routers_studies_jobs_metadata.py | 11 +++++---- .../api_studies/test_api_routes_studies.py | 16 ++++++------- .../unit/captures/test__mocks_captures.py | 8 +++---- 15 files changed, 83 insertions(+), 71 deletions(-) diff --git a/services/api-server/src/simcore_service_api_server/api/routes/solvers.py b/services/api-server/src/simcore_service_api_server/api/routes/solvers.py index 18e23820826..83e1289698a 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/solvers.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/solvers.py @@ -7,7 +7,6 @@ from 
httpx import HTTPStatusError from models_library.api_schemas_api_server.pricing_plans import ServicePricingPlanGet from pydantic import ValidationError -from pydantic.errors import PydanticValueError from ...exceptions.service_errors_utils import DEFAULT_BACKEND_SERVICE_STATUS_CODES from ...models.basic_types import VersionStr @@ -230,7 +229,6 @@ async def get_solver_release( IndexError, ValidationError, HTTPStatusError, - PydanticValueError, ) as err: raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, diff --git a/services/api-server/src/simcore_service_api_server/core/application.py b/services/api-server/src/simcore_service_api_server/core/application.py index 82612bfe86c..30560b924b0 100644 --- a/services/api-server/src/simcore_service_api_server/core/application.py +++ b/services/api-server/src/simcore_service_api_server/core/application.py @@ -53,7 +53,7 @@ def init_app(settings: ApplicationSettings | None = None) -> FastAPI: config_all_loggers( log_format_local_dev_enabled=settings.API_SERVER_LOG_FORMAT_LOCAL_DEV_ENABLED ) - _logger.debug("App settings:\n%s", settings.json(indent=2)) + _logger.debug("App settings:\n%s", settings.model_dump_json(indent=2)) # Labeling title = "osparc.io public API" diff --git a/services/api-server/src/simcore_service_api_server/models/api_resources.py b/services/api-server/src/simcore_service_api_server/models/api_resources.py index 88f57e41f5e..e66554fc011 100644 --- a/services/api-server/src/simcore_service_api_server/models/api_resources.py +++ b/services/api-server/src/simcore_service_api_server/models/api_resources.py @@ -1,9 +1,8 @@ -import re import urllib.parse -from typing import Any +from typing import Annotated, Any, TypeAlias -from pydantic import BaseModel, ConfigDict, Field -from pydantic.types import ConstrainedStr +from pydantic import BaseModel, Field +from pydantic.types import StringConstraints # RESOURCE NAMES https://cloud.google.com/apis/design/resource_names # @@ -30,9 +29,9 @@ _RELATIVE_RESOURCE_NAME_RE = r"^([^\s/]+/?){1,10}$" -class RelativeResourceName(ConstrainedStr): - regex = re.compile(_RELATIVE_RESOURCE_NAME_RE) - model_config = ConfigDict(frozen=True) +RelativeResourceName: TypeAlias = Annotated[ + str, StringConstraints(pattern=_RELATIVE_RESOURCE_NAME_RE) +] # NOTE: we quote parts in a single resource_name and unquote when split diff --git a/services/api-server/src/simcore_service_api_server/models/basic_types.py b/services/api-server/src/simcore_service_api_server/models/basic_types.py index 53ea6fe31ce..8e0c4c79af2 100644 --- a/services/api-server/src/simcore_service_api_server/models/basic_types.py +++ b/services/api-server/src/simcore_service_api_server/models/basic_types.py @@ -1,17 +1,14 @@ -import re +from typing import Annotated, TypeAlias from fastapi.responses import StreamingResponse from models_library.basic_regex import SIMPLE_VERSION_RE -from pydantic import ConstrainedStr +from pydantic import StringConstraints +VersionStr: TypeAlias = Annotated[ + str, StringConstraints(strip_whitespace=True, pattern=SIMPLE_VERSION_RE) +] -class VersionStr(ConstrainedStr): - strip_whitespace = True - regex = re.compile(SIMPLE_VERSION_RE) - - -class FileNameStr(ConstrainedStr): - strip_whitespace = True +FileNameStr: TypeAlias = Annotated[str, StringConstraints(strip_whitespace=True)] class LogStreamingResponse(StreamingResponse): diff --git a/services/api-server/src/simcore_service_api_server/models/pagination.py b/services/api-server/src/simcore_service_api_server/models/pagination.py index 
bbe99af5bbf..f35969592cc 100644 --- a/services/api-server/src/simcore_service_api_server/models/pagination.py +++ b/services/api-server/src/simcore_service_api_server/models/pagination.py @@ -18,7 +18,7 @@ MAXIMUM_NUMBER_OF_ITEMS_PER_PAGE, ) from models_library.utils.pydantic_tools_extension import FieldNotRequired -from pydantic import BaseModel, ConfigDict, Field, NonNegativeInt +from pydantic import BaseModel, ConfigDict, Field, NonNegativeInt, field_validator T = TypeVar("T") diff --git a/services/api-server/src/simcore_service_api_server/models/schemas/files.py b/services/api-server/src/simcore_service_api_server/models/schemas/files.py index 9ac6c252899..42686f0876e 100644 --- a/services/api-server/src/simcore_service_api_server/models/schemas/files.py +++ b/services/api-server/src/simcore_service_api_server/models/schemas/files.py @@ -17,8 +17,9 @@ ConfigDict, Field, StringConstraints, + TypeAdapter, + ValidationInfo, field_validator, - parse_obj_as, ) from servicelib.file_utils import create_sha256_checksum @@ -79,9 +80,9 @@ class File(BaseModel): @field_validator("content_type", mode="before") @classmethod - def guess_content_type(cls, v, values): + def guess_content_type(cls, v, info: ValidationInfo): if v is None: - filename = values.get("filename") + filename = info.data.get("filename") if filename: mime_content_type, _ = guess_type(filename, strict=False) return mime_content_type @@ -136,8 +137,8 @@ async def create_from_client_file( @classmethod async def create_from_quoted_storage_id(cls, quoted_storage_id: str) -> "File": - storage_file_id: StorageFileID = parse_obj_as( - StorageFileID, _unquote(quoted_storage_id) # type: ignore[arg-type] + storage_file_id: StorageFileID = TypeAdapter(StorageFileID).validate_python( + _unquote(quoted_storage_id) ) _, fid, fname = Path(storage_file_id).parts return cls(id=UUID(fid), filename=fname, checksum=None) @@ -149,8 +150,8 @@ def create_id(cls, *keys) -> UUID: @property def storage_file_id(self) -> StorageFileID: """Get the StorageFileId associated with this file""" - return parse_obj_as( - StorageFileID, f"api/{self.id}/{self.filename}" # type: ignore[arg-type] + return TypeAdapter(StorageFileID).validate_python( + f"api/{self.id}/{self.filename}" ) @property diff --git a/services/api-server/src/simcore_service_api_server/services/catalog.py b/services/api-server/src/simcore_service_api_server/services/catalog.py index 43823cf9d36..758fe44792a 100644 --- a/services/api-server/src/simcore_service_api_server/services/catalog.py +++ b/services/api-server/src/simcore_service_api_server/services/catalog.py @@ -9,7 +9,7 @@ from fastapi import FastAPI, status from models_library.emails import LowerCaseEmailStr from models_library.services import ServiceMetaDataPublished, ServiceType -from pydantic import ConfigDict, ValidationError, parse_obj_as, parse_raw_as +from pydantic import ConfigDict, TypeAdapter, ValidationError from settings_library.catalog import CatalogSettings from simcore_service_api_server.exceptions.backend_errors import ( ListSolversOrStudiesError, @@ -68,6 +68,17 @@ def to_solver(self) -> Solver: _exception_mapper = partial(service_exception_mapper, "Catalog") +_TRUNCATED_CATALOG_SERVICE_OUT_ADAPTER: TypeAdapter[ + TruncatedCatalogServiceOut +] = TypeAdapter(TruncatedCatalogServiceOut) +_LIST_OF_TRUNCATED_CATALOG_SERVICE_OUT_ADAPTER: TypeAdapter[ + list[TruncatedCatalogServiceOut] +] = TypeAdapter(list[TruncatedCatalogServiceOut]) + + +def _parse_response(type_adapter: TypeAdapter, response): + return 
type_adapter.validate_json(response.text) + @dataclass class CatalogApi(BaseServiceClientApi): @@ -97,7 +108,10 @@ async def list_solvers( services: list[ TruncatedCatalogServiceOut ] = await asyncio.get_event_loop().run_in_executor( - None, parse_raw_as, list[TruncatedCatalogServiceOut], response.text + None, + _parse_response, + _LIST_OF_TRUNCATED_CATALOG_SERVICE_OUT_ADAPTER, + response.text, ) solvers = [] for service in services: @@ -113,7 +127,7 @@ async def list_solvers( # invalid items instead of returning error _logger.warning( "Skipping invalid service returned by catalog '%s': %s", - service.json(), + service.model_dump_json(), err, ) return solvers @@ -138,7 +152,7 @@ async def get_service( service: ( TruncatedCatalogServiceOut ) = await asyncio.get_event_loop().run_in_executor( - None, parse_raw_as, TruncatedCatalogServiceOut, response.text + None, _parse_response, _TRUNCATED_CATALOG_SERVICE_OUT_ADAPTER, response.text ) assert ( # nosec service.service_type == ServiceType.COMPUTATIONAL @@ -165,7 +179,7 @@ async def get_service_ports( response.raise_for_status() - return parse_obj_as(list[SolverPort], response.json()) + return TypeAdapter(list[SolverPort]).validate_python(response.json()) async def list_latest_releases( self, *, user_id: int, product_name: str diff --git a/services/api-server/src/simcore_service_api_server/services/director_v2.py b/services/api-server/src/simcore_service_api_server/services/director_v2.py index 1eafd21cea9..f2a9eca3018 100644 --- a/services/api-server/src/simcore_service_api_server/services/director_v2.py +++ b/services/api-server/src/simcore_service_api_server/services/director_v2.py @@ -14,7 +14,7 @@ ConfigDict, Field, PositiveInt, - parse_raw_as, + TypeAdapter, ) from simcore_service_api_server.exceptions.backend_errors import ( JobNotFoundError, @@ -47,8 +47,8 @@ class ComputationTaskGet(ComputationTask): def guess_progress(self) -> PercentageInt: # guess progress based on self.state if self.state in [RunningState.SUCCESS, RunningState.FAILED]: - return PercentageInt(100) - return PercentageInt(0) + return 100 + return 0 model_config = ConfigDict( json_schema_extra={ @@ -187,7 +187,9 @@ async def get_computation_logs( response.raise_for_status() log_links: list[LogLink] = [] - for r in parse_raw_as(list[TaskLogFileGet], response.text or "[]"): + for r in TypeAdapter(list[TaskLogFileGet]).validate_python( + response.text or "[]" + ): if r.download_link: log_links.append( LogLink(node_name=f"{r.task_id}", download_link=r.download_link) diff --git a/services/api-server/tests/unit/_with_db/conftest.py b/services/api-server/tests/unit/_with_db/conftest.py index 57450561ce4..89cf5f22344 100644 --- a/services/api-server/tests/unit/_with_db/conftest.py +++ b/services/api-server/tests/unit/_with_db/conftest.py @@ -161,7 +161,7 @@ def app_environment( assert "API_SERVER_POSTGRES" not in envs # Should be sufficient to create settings - print(PostgresSettings.create_from_envs().json(indent=1)) + print(PostgresSettings.create_from_envs().model_dump_json(indent=1)) return envs diff --git a/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs_delete.py b/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs_delete.py index 000a586836a..bdb3886ebec 100644 --- a/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs_delete.py +++ b/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs_delete.py @@ -11,7 +11,7 @@ import pytest from faker import Faker from models_library.basic_regex 
import UUID_RE_BASE -from pydantic import parse_file_as +from pydantic import TypeAdapter from pytest_simcore.helpers.httpx_calls_capture_models import HttpApiCallCaptureModel from respx import MockRouter from servicelib.common_headers import ( @@ -42,7 +42,7 @@ def mocked_backend_services_apis_for_delete_non_existing_project( template = environment.get_template(mock_name) def _response(request: httpx.Request, project_id: str): - capture = HttpApiCallCaptureModel.parse_raw( + capture = HttpApiCallCaptureModel.model_validate_json( template.render(project_id=project_id) ) return httpx.Response( @@ -91,9 +91,8 @@ def mocked_backend_services_apis_for_create_and_delete_solver_job( mock_name = "on_create_job.json" # fixture - captures = parse_file_as( - list[HttpApiCallCaptureModel], - project_tests_dir / "mocks" / mock_name, + captures = TypeAdapter(list[HttpApiCallCaptureModel]).validate_json( + Path(project_tests_dir / "mocks" / mock_name).read_text() ) capture = captures[0] @@ -137,10 +136,10 @@ async def test_create_and_delete_solver_job( "x": 3.14, "n": 42, } - ).dict(), + ).model_dump(), ) assert resp.status_code == status.HTTP_201_CREATED - job = Job.parse_obj(resp.json()) + job = Job.model_validate(resp.json()) # Delete Job after creation resp = await client.delete( @@ -224,7 +223,7 @@ def create_project_side_effect(request: httpx.Request): "x": 3.14, "n": 42, } - ).dict(), + ).model_dump(), ) assert resp.status_code == status.HTTP_201_CREATED - job = Job.parse_obj(resp.json()) + job = Job.model_validate(resp.json()) diff --git a/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs_metadata.py b/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs_metadata.py index 8afb38ca86e..6b62c89b6b8 100644 --- a/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs_metadata.py +++ b/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs_metadata.py @@ -10,7 +10,7 @@ import pytest from faker import Faker from models_library.basic_regex import UUID_RE_BASE -from pydantic import parse_file_as +from pydantic import TypeAdapter from pytest_simcore.helpers.httpx_calls_capture_models import HttpApiCallCaptureModel from respx import MockRouter from simcore_service_api_server._meta import API_VTAG @@ -45,8 +45,8 @@ def mocked_backend( captures = { c.name: c - for c in parse_file_as( - list[HttpApiCallCaptureModel], project_tests_dir / "mocks" / mock_name + for c in TypeAdapter(list[HttpApiCallCaptureModel]).validate_json( + Path(project_tests_dir / "mocks" / mock_name).read_text() ) } diff --git a/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs_read.py b/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs_read.py index 1dbf8b3fa0f..b51c580eb82 100644 --- a/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs_read.py +++ b/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs_read.py @@ -7,7 +7,7 @@ import httpx import pytest -from pydantic import parse_file_as, parse_obj_as +from pydantic import TypeAdapter from pytest_simcore.helpers.httpx_calls_capture_models import HttpApiCallCaptureModel from respx import MockRouter from simcore_service_api_server._meta import API_VTAG @@ -28,9 +28,8 @@ def mocked_backend( project_tests_dir: Path, ) -> MockBackendRouters: mock_name = "on_list_jobs.json" - captures = parse_file_as( - list[HttpApiCallCaptureModel], - project_tests_dir / "mocks" / mock_name, + captures = 
TypeAdapter(list[HttpApiCallCaptureModel]).validate_json( + Path(project_tests_dir / "mocks" / mock_name).read_text() ) capture = captures[0] @@ -78,7 +77,7 @@ async def test_list_solver_jobs( f"/{API_VTAG}/solvers/{solver_key}/releases/{solver_version}/jobs", auth=auth ) assert resp.status_code == status.HTTP_200_OK - jobs = parse_obj_as(list[Job], resp.json()) + jobs = TypeAdapter(list[Job]).validate_python(resp.json()) # list jobs (w/ pagination) resp = await client.get( @@ -88,7 +87,7 @@ async def test_list_solver_jobs( ) assert resp.status_code == status.HTTP_200_OK - jobs_page = parse_obj_as(Page[Job], resp.json()) + jobs_page = TypeAdapter(Page[Job]).validate_python(resp.json()) assert jobs_page.items == jobs diff --git a/services/api-server/tests/unit/api_studies/test_api_routers_studies_jobs_metadata.py b/services/api-server/tests/unit/api_studies/test_api_routers_studies_jobs_metadata.py index 098718c3738..d1fae307589 100644 --- a/services/api-server/tests/unit/api_studies/test_api_routers_studies_jobs_metadata.py +++ b/services/api-server/tests/unit/api_studies/test_api_routers_studies_jobs_metadata.py @@ -12,7 +12,7 @@ import httpx import pytest from fastapi.encoders import jsonable_encoder -from pydantic import parse_file_as +from pydantic import TypeAdapter from pytest_simcore.helpers.httpx_calls_capture_models import HttpApiCallCaptureModel from pytest_simcore.helpers.httpx_calls_capture_parameters import PathDescription from respx import MockRouter @@ -38,9 +38,12 @@ def mocked_backend( # load captures = { c.name: c - for c in parse_file_as( - list[HttpApiCallCaptureModel], - project_tests_dir / "mocks" / "test_get_and_update_study_job_metadata.json", + for c in TypeAdapter(list[HttpApiCallCaptureModel]).validate_json( + Path( + project_tests_dir + / "mocks" + / "test_get_and_update_study_job_metadata.json" + ).read_text(), ) } diff --git a/services/api-server/tests/unit/api_studies/test_api_routes_studies.py b/services/api-server/tests/unit/api_studies/test_api_routes_studies.py index 1893e6e068e..d5369bb0314 100644 --- a/services/api-server/tests/unit/api_studies/test_api_routes_studies.py +++ b/services/api-server/tests/unit/api_studies/test_api_routes_studies.py @@ -12,7 +12,7 @@ import pytest from faker import Faker from fastapi import status -from pydantic import parse_file_as, parse_obj_as +from pydantic import TypeAdapter from pytest_simcore.helpers.httpx_calls_capture_models import HttpApiCallCaptureModel from respx import MockRouter from servicelib.common_headers import ( @@ -40,8 +40,8 @@ def mocked_backend( captures = { c.name: c - for c in parse_file_as( - list[HttpApiCallCaptureModel], project_tests_dir / "mocks" / mock_name + for c in TypeAdapter(list[HttpApiCallCaptureModel]).validate_json( + Path(project_tests_dir / "mocks" / mock_name).read_text() ) } @@ -84,7 +84,7 @@ async def test_studies_read_workflow( resp = await client.get(f"/{API_VTAG}/studies", auth=auth) assert resp.status_code == status.HTTP_200_OK - studies = parse_obj_as(list[Study], resp.json()["items"]) + studies = TypeAdapter(list[Study]).validate_python(resp.json()["items"]) assert len(studies) == 1 assert studies[0].uid == study_id @@ -96,28 +96,28 @@ async def test_studies_read_workflow( resp = await client.get(f"/{API_VTAG}/studies/{study_id}", auth=auth) assert resp.status_code == status.HTTP_200_OK - study = parse_obj_as(Study, resp.json()) + study = TypeAdapter(Study).validate_python(resp.json()) assert study.uid == study_id # get ports resp = await 
client.get(f"/{API_VTAG}/studies/{study_id}/ports", auth=auth) assert resp.status_code == status.HTTP_200_OK - ports = parse_obj_as(list[StudyPort], resp.json()["items"]) + ports = TypeAdapter(list[StudyPort]).validate_python(resp.json()["items"]) assert len(ports) == (resp.json()["total"]) # get_study with non-existing uuid inexistent_study_id = StudyID("15531b1a-2565-11ee-ab43-02420a000031") resp = await client.get(f"/{API_VTAG}/studies/{inexistent_study_id}", auth=auth) assert resp.status_code == status.HTTP_404_NOT_FOUND - error = parse_obj_as(ErrorGet, resp.json()) + error = TypeAdapter(ErrorGet).validate_python(resp.json()) assert f"{inexistent_study_id}" in error.errors[0] resp = await client.get( f"/{API_VTAG}/studies/{inexistent_study_id}/ports", auth=auth ) assert resp.status_code == status.HTTP_404_NOT_FOUND - error = parse_obj_as(ErrorGet, resp.json()) + error = TypeAdapter(ErrorGet).validate_python(resp.json()) assert f"{inexistent_study_id}" in error.errors[0] diff --git a/services/api-server/tests/unit/captures/test__mocks_captures.py b/services/api-server/tests/unit/captures/test__mocks_captures.py index 4c04cca224d..81297e1bbe5 100644 --- a/services/api-server/tests/unit/captures/test__mocks_captures.py +++ b/services/api-server/tests/unit/captures/test__mocks_captures.py @@ -14,7 +14,7 @@ import jsonref import pytest import respx -from pydantic import parse_file_as +from pydantic import TypeAdapter from pytest_simcore.helpers.httpx_calls_capture_models import HttpApiCallCaptureModel from pytest_simcore.helpers.httpx_calls_capture_openapi import _determine_path from pytest_simcore.helpers.httpx_calls_capture_parameters import ( @@ -86,9 +86,9 @@ def test_openapion_capture_mock( assert mock_capture_path.exists() assert mock_capture_path.name.endswith(".json") - captures = parse_file_as( - list[HttpApiCallCaptureModel] | HttpApiCallCaptureModel, mock_capture_path - ) + captures = TypeAdapter( + list[HttpApiCallCaptureModel] | HttpApiCallCaptureModel + ).validate_json(mock_capture_path.read_text()) if not isinstance(captures, list): captures = [ From a697a3f5a4496ccc4a68afe0a720fd0149fc4df8 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 2 Oct 2024 16:20:02 +0200 Subject: [PATCH 184/280] fix mypy --- .../core/settings.py | 21 +++++++++---- .../models/api_resources.py | 7 +++-- .../services/solver_job_models_converters.py | 30 ++++++++++--------- 3 files changed, 35 insertions(+), 23 deletions(-) diff --git a/services/api-server/src/simcore_service_api_server/core/settings.py b/services/api-server/src/simcore_service_api_server/core/settings.py index dfc97394199..d717258bad4 100644 --- a/services/api-server/src/simcore_service_api_server/core/settings.py +++ b/services/api-server/src/simcore_service_api_server/core/settings.py @@ -79,18 +79,27 @@ class ApplicationSettings(BasicSettings): # DOCKER BOOT SC_BOOT_MODE: BootModeEnum | None - API_SERVER_POSTGRES: PostgresSettings | None = Field(auto_default_from_env=True) + API_SERVER_POSTGRES: PostgresSettings | None = Field( + json_schema_extra={"auto_default_from_env": True} + ) API_SERVER_RABBITMQ: RabbitSettings | None = Field( - auto_default_from_env=True, description="settings for service/rabbitmq" + json_schema_extra={"auto_default_from_env": True}, + description="settings for service/rabbitmq", ) # SERVICES with http API - API_SERVER_WEBSERVER: WebServerSettings | None = Field(auto_default_from_env=True) - API_SERVER_CATALOG: CatalogSettings | None = Field(auto_default_from_env=True) - API_SERVER_STORAGE: 
StorageSettings | None = Field(auto_default_from_env=True) + API_SERVER_WEBSERVER: WebServerSettings | None = Field( + json_schema_extra={"auto_default_from_env": True} + ) + API_SERVER_CATALOG: CatalogSettings | None = Field( + json_schema_extra={"auto_default_from_env": True} + ) + API_SERVER_STORAGE: StorageSettings | None = Field( + json_schema_extra={"auto_default_from_env": True} + ) API_SERVER_DIRECTOR_V2: DirectorV2Settings | None = Field( - auto_default_from_env=True + json_schema_extra={"auto_default_from_env": True} ) API_SERVER_LOG_CHECK_TIMEOUT_SECONDS: NonNegativeInt = 3 * 60 API_SERVER_PROMETHEUS_INSTRUMENTATION_ENABLED: bool = True diff --git a/services/api-server/src/simcore_service_api_server/models/api_resources.py b/services/api-server/src/simcore_service_api_server/models/api_resources.py index e66554fc011..aae7531a837 100644 --- a/services/api-server/src/simcore_service_api_server/models/api_resources.py +++ b/services/api-server/src/simcore_service_api_server/models/api_resources.py @@ -1,7 +1,8 @@ +import re import urllib.parse from typing import Annotated, Any, TypeAlias -from pydantic import BaseModel, Field +from pydantic import BaseModel, Field, TypeAdapter from pydantic.types import StringConstraints # RESOURCE NAMES https://cloud.google.com/apis/design/resource_names @@ -38,7 +39,7 @@ def parse_last_resource_id(resource_name: RelativeResourceName) -> str: - if match := RelativeResourceName.regex.match(resource_name): + if match := re.match(_RELATIVE_RESOURCE_NAME_RE, resource_name): last_quoted_part = match.group(1) return urllib.parse.unquote_plus(last_quoted_part) msg = f"Invalid '{resource_name=}' does not match RelativeResourceName" @@ -50,7 +51,7 @@ def compose_resource_name(*collection_or_resource_ids) -> RelativeResourceName: urllib.parse.quote_plus(f"{_id}".lstrip("/")) for _id in collection_or_resource_ids ] - return RelativeResourceName("/".join(quoted_parts)) + return TypeAdapter(RelativeResourceName).validate_python("/".join(quoted_parts)) def split_resource_name(resource_name: RelativeResourceName) -> list[str]: diff --git a/services/api-server/src/simcore_service_api_server/services/solver_job_models_converters.py b/services/api-server/src/simcore_service_api_server/services/solver_job_models_converters.py index 137463e1263..74a74940e40 100644 --- a/services/api-server/src/simcore_service_api_server/services/solver_job_models_converters.py +++ b/services/api-server/src/simcore_service_api_server/services/solver_job_models_converters.py @@ -6,14 +6,14 @@ import urllib.parse import uuid from collections.abc import Callable -from datetime import datetime +from datetime import datetime, timezone from functools import lru_cache import arrow from models_library.api_schemas_webserver.projects import ProjectCreateNew, ProjectGet from models_library.basic_types import KeyIDStr from models_library.projects_nodes import InputID -from pydantic import parse_obj_as +from pydantic import TypeAdapter from ..models.basic_types import VersionStr from ..models.domain.projects import InputTypes, Node, SimCoreFileLink, StudyUI @@ -45,7 +45,7 @@ def format_datetime(snapshot: datetime) -> str: def now_str() -> str: # NOTE: backend MUST use UTC - return format_datetime(datetime.utcnow()) + return format_datetime(datetime.now(timezone.utc)) # CONVERTERS -------------- @@ -62,8 +62,8 @@ def create_node_inputs_from_job_inputs( node_inputs: dict[InputID, InputTypes] = {} for name, value in inputs.values.items(): - assert parse_obj_as(ArgumentTypes, value) == value # type: 
ignore # nosec - assert parse_obj_as(KeyIDStr, name) is not None # nosec + assert TypeAdapter(ArgumentTypes).validate_python(value) == value # type: ignore # nosec + assert TypeAdapter(KeyIDStr).validate_python(name) is not None # nosec if isinstance(value, File): # FIXME: ensure this aligns with storage policy @@ -88,9 +88,9 @@ def create_job_inputs_from_node_inputs(inputs: dict[InputID, InputTypes]) -> Job """ input_values: dict[str, ArgumentTypes] = {} for name, value in inputs.items(): - assert parse_obj_as(InputID, name) == name # nosec + assert TypeAdapter(InputID).validate_python(name) == name # nosec assert ( # nosec - parse_obj_as(InputTypes, value) == value # type: ignore[arg-type] + TypeAdapter(InputTypes).validate_python(value) == value # type: ignore[arg-type] ) if isinstance(value, SimCoreFileLink): @@ -141,15 +141,15 @@ def create_new_project_for_job( ) solver_service = Node( - key=solver.id, # type: ignore[arg-type] - version=solver.version, # type: ignore[arg-type] + key=solver.id, + version=solver.version, label=solver.title, inputs=solver_inputs, inputsUnits={}, ) # Ensembles project model so it can be used as input for create_project - job_info = job.json( + job_info = job.model_dump_json( include={"id", "name", "inputs_checksum", "created_at"}, indent=2 ) @@ -158,7 +158,7 @@ def create_new_project_for_job( name=job.name, # NOTE: this IS an identifier as well. MUST NOT be changed in the case of project APIs! description=f"Study associated to solver job:\n{job_info}", thumbnail="https://via.placeholder.com/170x120.png", # type: ignore[arg-type] - workbench={solver_id: solver_service}, # type: ignore[dict-item] + workbench={solver_id: solver_service}, ui=StudyUI( workbench={ f"{solver_id}": { # type: ignore[dict-item] @@ -208,10 +208,10 @@ def create_job_from_project( job = Job( id=job_id, - name=project.name, # type: ignore[arg-type] + name=project.name, inputs_checksum=job_inputs.compute_checksum(), created_at=project.creation_date, # type: ignore[arg-type] - runner_name=solver_name, # type: ignore + runner_name=solver_name, url=url_for( "get_job", solver_key=solver_key, @@ -231,7 +231,9 @@ def create_job_from_project( ), ) - assert all(getattr(job, f) for f in job.__fields__ if f.endswith("url")) # nosec + assert all( + getattr(job, f) for f in job.model_fields.keys() if f.endswith("url") + ) # nosec return job From 301d457fe62176be93d54b18f55bd368dc67f036 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 2 Oct 2024 16:25:40 +0200 Subject: [PATCH 185/280] fix mypy --- .../simcore_service_api_server/api/routes/solvers_jobs.py | 2 +- .../simcore_service_api_server/models/schemas/studies.py | 8 +++++--- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/services/api-server/src/simcore_service_api_server/api/routes/solvers_jobs.py b/services/api-server/src/simcore_service_api_server/api/routes/solvers_jobs.py index 560d656d5d4..4e10d367434 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/solvers_jobs.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/solvers_jobs.py @@ -51,7 +51,7 @@ def _compose_job_resource_name(solver_key, solver_version, job_id) -> str: """Creates a unique resource name for solver's jobs""" return Job.compose_resource_name( - parent_name=Solver.compose_resource_name(solver_key, solver_version), # type: ignore + parent_name=Solver.compose_resource_name(solver_key, solver_version), job_id=job_id, ) diff --git a/services/api-server/src/simcore_service_api_server/models/schemas/studies.py 
b/services/api-server/src/simcore_service_api_server/models/schemas/studies.py index 96c63ee7910..806ec309017 100644 --- a/services/api-server/src/simcore_service_api_server/models/schemas/studies.py +++ b/services/api-server/src/simcore_service_api_server/models/schemas/studies.py @@ -1,15 +1,17 @@ -from typing import TypeAlias +from typing import Annotated, TypeAlias from models_library import projects, projects_nodes_io from models_library.utils import pydantic_tools_extension -from pydantic import AnyUrl, BaseModel, Field +from pydantic import AnyUrl, BaseModel, BeforeValidator, Field, TypeAdapter from .. import api_resources from . import solvers StudyID: TypeAlias = projects.ProjectID NodeName: TypeAlias = str -DownloadLink: TypeAlias = AnyUrl +DownloadLink: TypeAlias = Annotated[ + str, BeforeValidator(lambda x: str(TypeAdapter(AnyUrl).validate_python(x))) +] class Study(BaseModel): From 1df117929f730c7200c6fb815e600336a0655d78 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 3 Oct 2024 08:58:05 +0200 Subject: [PATCH 186/280] fix field name --- packages/simcore-sdk/tests/unit/test_node_ports_v2_port.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/simcore-sdk/tests/unit/test_node_ports_v2_port.py b/packages/simcore-sdk/tests/unit/test_node_ports_v2_port.py index 26cf9a4d360..d96e0403551 100644 --- a/packages/simcore-sdk/tests/unit/test_node_ports_v2_port.py +++ b/packages/simcore-sdk/tests/unit/test_node_ports_v2_port.py @@ -318,7 +318,7 @@ class PortParams(NamedTuple): exp_new_value=FileLink( store=simcore_store_id(), path=f"{project_id()}/{node_uuid()}/no_file/{this_node_file_name().name}", - eTag=e_tag(), + e_tag=e_tag(), ), exp_new_get_value=download_file_folder_name() / "no_file" From 9e99c516996a3b4862d8ae220f8183ff5ada158c Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 3 Oct 2024 09:32:05 +0200 Subject: [PATCH 187/280] fix validationerror match --- packages/service-library/tests/rabbitmq/test_rabbitmq_rpc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/service-library/tests/rabbitmq/test_rabbitmq_rpc.py b/packages/service-library/tests/rabbitmq/test_rabbitmq_rpc.py index fc37076d56d..46588de6e87 100644 --- a/packages/service-library/tests/rabbitmq/test_rabbitmq_rpc.py +++ b/packages/service-library/tests/rabbitmq/test_rabbitmq_rpc.py @@ -351,7 +351,7 @@ async def _a_handler() -> None: if expect_fail: with pytest.raises( - ValidationError, match=r"String should have at most \d+ characters" + ValidationError, match="String should have at most 255 characters" ): await rpc_server.register_handler( RPCNamespace("a"), RPCMethodName(handler_name), _a_handler From b07d12bdbae0d2a6ebec67f2f8f14e10fcfa82bc Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 3 Oct 2024 09:32:53 +0200 Subject: [PATCH 188/280] fix optional field --- .../simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py index bae02ab24d3..edf24e6e297 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py @@ -44,7 +44,7 @@ class Nodeports(BaseModel): ] auto_update: bool = False r_clone_settings: RCloneSettings | None = None - io_log_redirect_cb: LogRedirectCB | None = None + io_log_redirect_cb: LogRedirectCB | None aws_s3_cli_settings: 
AwsS3CliSettings | None = None model_config = ConfigDict( arbitrary_types_allowed=True, From 444a60052b7ec812b5414ab8e452bf9caf90359f Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 3 Oct 2024 09:37:22 +0200 Subject: [PATCH 189/280] fix comments --- packages/aws-library/src/aws_library/ec2/_models.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/packages/aws-library/src/aws_library/ec2/_models.py b/packages/aws-library/src/aws_library/ec2/_models.py index ed260ad6ff0..d1ff9cb3cc9 100644 --- a/packages/aws-library/src/aws_library/ec2/_models.py +++ b/packages/aws-library/src/aws_library/ec2/_models.py @@ -68,8 +68,8 @@ class EC2InstanceType: InstancePrivateDNSName: TypeAlias = str -# see [https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/Using_Tags.html#tag-restrictions] AWSTagKey: TypeAlias = Annotated[ + # see [https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/Using_Tags.html#tag-restrictions] str, StringConstraints( min_length=1, @@ -78,9 +78,10 @@ class EC2InstanceType: ), ] -# see [https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/Using_Tags.html#tag-restrictions] -# quotes []{} were added as it allows to json encode. it seems to be accepted as a value + AWSTagValue: TypeAlias = Annotated[ + # see [https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/Using_Tags.html#tag-restrictions] + # quotes []{} were added as it allows to json encode. it seems to be accepted as a value str, StringConstraints( min_length=0, From 2a8d6eb2982c71ad1ed28aa28e0ed8ff2694f33f Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 3 Oct 2024 09:42:33 +0200 Subject: [PATCH 190/280] remove redundant comment --- .../models-library/src/models_library/utils/serialization.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/models-library/src/models_library/utils/serialization.py b/packages/models-library/src/models_library/utils/serialization.py index d05caf64c56..aae386a88a9 100644 --- a/packages/models-library/src/models_library/utils/serialization.py +++ b/packages/models-library/src/models_library/utils/serialization.py @@ -17,7 +17,7 @@ def model_dump_with_secrets( if isinstance(field_data, SecretStr): if show_secrets: - data[field_name] = field_data.get_secret_value() # Expose the raw value + data[field_name] = field_data.get_secret_value() else: data[field_name] = str(field_data) elif isinstance(field_data, dict): From 386c3160a08464cad38600f989c1c6bc5e12ac8f Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 3 Oct 2024 10:40:31 +0200 Subject: [PATCH 191/280] remove unnecessary schema patch --- packages/models-library/src/models_library/projects.py | 9 --------- 1 file changed, 9 deletions(-) diff --git a/packages/models-library/src/models_library/projects.py b/packages/models-library/src/models_library/projects.py index 43f75113b7b..120e54d899d 100644 --- a/packages/models-library/src/models_library/projects.py +++ b/packages/models-library/src/models_library/projects.py @@ -1,7 +1,6 @@ """ Models a study's project document """ -from copy import deepcopy from datetime import datetime from enum import Enum from typing import Any, Final, TypeAlias @@ -113,13 +112,6 @@ def convert_sql_alchemy_enum(cls, v): ) -def _patch_json_schema_extra(schema: dict) -> None: - # Patch to allow jsonschema nullable - # SEE https://github.com/samuelcolvin/pydantic/issues/990#issuecomment-645961530 - state_pydantic_schema = deepcopy(schema["properties"]["state"]) - schema["properties"]["state"] = {"anyOf": [{"type": "null"}, state_pydantic_schema]} - 
- class Project(BaseProjectModel): # NOTE: This is the pydantic pendant of project-v0.0.1.json used in the API of the webserver/webclient # NOT for usage with DB!! @@ -182,5 +174,4 @@ class Project(BaseProjectModel): model_config = ConfigDict( title="osparc-simcore project", extra="forbid", - json_schema_extra=_patch_json_schema_extra, ) From 68b1599dd36a3eb432f32fec8bcce87df5cd59a3 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 3 Oct 2024 11:06:02 +0200 Subject: [PATCH 192/280] upgrade CI requrements --- ci/helpers/requirements.txt | 30 +++++++++++++++++++----------- 1 file changed, 19 insertions(+), 11 deletions(-) diff --git a/ci/helpers/requirements.txt b/ci/helpers/requirements.txt index 55a3ab4e163..daf26ed5c0b 100644 --- a/ci/helpers/requirements.txt +++ b/ci/helpers/requirements.txt @@ -1,17 +1,19 @@ # This file was autogenerated by uv via the following command: -# uv pip compile requirements.in +# uv pip compile requirements.in -o requirements.txt aiohttp==3.9.5 + # via + # -c ../../requirements/constraints.txt + # -r requirements.in aiosignal==1.3.1 # via aiohttp +annotated-types==0.7.0 + # via pydantic anyio==4.3.0 # via starlette -async-timeout==4.0.3 - # via aiohttp attrs==23.2.0 # via aiohttp -exceptiongroup==1.2.1 - # via anyio -fastapi==0.99.1 +fastapi==0.115.0 + # via -r requirements.in frozenlist==1.4.1 # via # aiohttp @@ -24,16 +26,22 @@ multidict==6.0.5 # via # aiohttp # yarl -pydantic==1.10.15 - # via fastapi +pydantic==2.9.2 + # via + # -c ../../requirements/constraints.txt + # fastapi +pydantic-core==2.23.4 + # via pydantic sniffio==1.3.1 # via anyio -starlette==0.27.0 - # via fastapi +starlette==0.38.6 + # via + # -c ../../requirements/constraints.txt + # fastapi typing-extensions==4.11.0 # via - # anyio # fastapi # pydantic + # pydantic-core yarl==1.9.4 # via aiohttp From 42a86be98704272d230f314e92384d47af1e8037 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 3 Oct 2024 11:52:35 +0200 Subject: [PATCH 193/280] run bump-pydantic --- .../simcore_service_catalog/core/settings.py | 29 ++++++++++++------- .../models/services_db.py | 24 +++++++-------- .../models/services_specifications.py | 4 +-- .../catalog/tests/unit/with_dbs/conftest.py | 10 +++---- 4 files changed, 36 insertions(+), 31 deletions(-) diff --git a/services/catalog/src/simcore_service_catalog/core/settings.py b/services/catalog/src/simcore_service_catalog/core/settings.py index a3d8fbf2f01..9e8eeaf2952 100644 --- a/services/catalog/src/simcore_service_catalog/core/settings.py +++ b/services/catalog/src/simcore_service_catalog/core/settings.py @@ -7,7 +7,7 @@ ) from models_library.basic_types import BootModeEnum, BuildTargetEnum, LogLevel from models_library.services_resources import ResourcesDict -from pydantic import ByteSize, Field, PositiveInt, parse_obj_as +from pydantic import AliasChoices, ByteSize, Field, PositiveInt, TypeAdapter from settings_library.base import BaseCustomSettings from settings_library.http_client_request import ClientRequestSettings from settings_library.postgres import PostgresSettings @@ -27,16 +27,15 @@ def base_url(self) -> str: return f"http://{self.DIRECTOR_HOST}:{self.DIRECTOR_PORT}/{self.DIRECTOR_VTAG}" -_DEFAULT_RESOURCES: Final[ResourcesDict] = parse_obj_as( - ResourcesDict, +_DEFAULT_RESOURCES: Final[ResourcesDict] = TypeAdapter(ResourcesDict).validate_python( { "CPU": { "limit": 0.1, "reservation": 0.1, }, "RAM": { - "limit": parse_obj_as(ByteSize, "2Gib"), - "reservation": parse_obj_as(ByteSize, "2Gib"), + "limit": 
TypeAdapter(ByteSize).validate_python("2Gib"), + "reservation": TypeAdapter(ByteSize).validate_python("2Gib"), }, }, ) @@ -53,11 +52,13 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): CATALOG_LOG_LEVEL: LogLevel = Field( LogLevel.INFO.value, - env=["CATALOG_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"], + validation_alias=AliasChoices("CATALOG_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"), ) CATALOG_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field( default=False, - env=["CATALOG_LOG_FORMAT_LOCAL_DEV_ENABLED", "LOG_FORMAT_LOCAL_DEV_ENABLED"], + validation_alias=AliasChoices( + "CATALOG_LOG_FORMAT_LOCAL_DEV_ENABLED", "LOG_FORMAT_LOCAL_DEV_ENABLED" + ), description="Enables local development log format. WARNING: make sure it is disabled if you want to have structured logs!", ) CATALOG_DEV_FEATURES_ENABLED: bool = Field( @@ -65,15 +66,21 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): description="Enables development features. WARNING: make sure it is disabled in production .env file!", ) - CATALOG_POSTGRES: PostgresSettings | None = Field(auto_default_from_env=True) + CATALOG_POSTGRES: PostgresSettings | None = Field( + json_schema_extra={"auto_default_from_env": True}, + ) - CATALOG_RABBITMQ: RabbitSettings = Field(auto_default_from_env=True) + CATALOG_RABBITMQ: RabbitSettings = Field( + json_schema_extra={"auto_default_from_env": True}, + ) CATALOG_CLIENT_REQUEST: ClientRequestSettings | None = Field( - auto_default_from_env=True + json_schema_extra={"auto_default_from_env": True} ) - CATALOG_DIRECTOR: DirectorSettings | None = Field(auto_default_from_env=True) + CATALOG_DIRECTOR: DirectorSettings | None = Field( + json_schema_extra={"auto_default_from_env": True} + ) CATALOG_PROMETHEUS_INSTRUMENTATION_ENABLED: bool = True diff --git a/services/catalog/src/simcore_service_catalog/models/services_db.py b/services/catalog/src/simcore_service_catalog/models/services_db.py index 2fd92f479ac..b6aa767306a 100644 --- a/services/catalog/src/simcore_service_catalog/models/services_db.py +++ b/services/catalog/src/simcore_service_catalog/models/services_db.py @@ -1,12 +1,12 @@ from datetime import datetime -from typing import Any, ClassVar +from typing import Any from models_library.products import ProductName from models_library.services_access import ServiceGroupAccessRights from models_library.services_base import ServiceKeyVersion from models_library.services_metadata_editable import ServiceMetaDataEditable from models_library.services_types import ServiceKey, ServiceVersion -from pydantic import BaseModel, Field +from pydantic import BaseModel, ConfigDict, Field from pydantic.types import PositiveInt from simcore_postgres_database.models.services_compatibility import CompatiblePolicyDict @@ -15,10 +15,9 @@ class ServiceMetaDataAtDB(ServiceKeyVersion, ServiceMetaDataEditable): # for a partial update all members must be Optional classifiers: list[str] | None = Field(default_factory=list) owner: PositiveInt | None - - class Config: - orm_mode = True - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + from_attributes=True, + json_schema_extra={ "example": { "key": "simcore/services/dynamic/sim4life", "version": "1.0.9", @@ -49,7 +48,8 @@ class Config: }, }, } - } + }, + ) class ReleaseFromDB(BaseModel): @@ -92,10 +92,9 @@ class ServiceWithHistoryFromDB(BaseModel): class ServiceAccessRightsAtDB(ServiceKeyVersion, ServiceGroupAccessRights): gid: PositiveInt product_name: ProductName - - class Config: - orm_mode = True - schema_extra: ClassVar[dict[str, 
Any]] = { + model_config = ConfigDict( + from_attributes=True, + json_schema_extra={ "example": { "key": "simcore/services/dynamic/sim4life", "version": "1.0.9", @@ -106,4 +105,5 @@ class Config: "created": "2021-01-18 12:46:57.7315", "modified": "2021-01-19 12:45:00", } - } + }, + ) diff --git a/services/catalog/src/simcore_service_catalog/models/services_specifications.py b/services/catalog/src/simcore_service_catalog/models/services_specifications.py index ce40b492f07..d53e56a8c56 100644 --- a/services/catalog/src/simcore_service_catalog/models/services_specifications.py +++ b/services/catalog/src/simcore_service_catalog/models/services_specifications.py @@ -3,6 +3,7 @@ ) from models_library.services import ServiceKey, ServiceVersion from models_library.users import GroupID +from pydantic import ConfigDict class ServiceSpecificationsAtDB(ServiceSpecifications): @@ -10,5 +11,4 @@ class ServiceSpecificationsAtDB(ServiceSpecifications): service_version: ServiceVersion gid: GroupID - class Config(ServiceSpecifications.Config): - orm_mode: bool = True + model_config = ConfigDict(from_attributes=True) diff --git a/services/catalog/tests/unit/with_dbs/conftest.py b/services/catalog/tests/unit/with_dbs/conftest.py index b5284c22259..aeaedaf4d48 100644 --- a/services/catalog/tests/unit/with_dbs/conftest.py +++ b/services/catalog/tests/unit/with_dbs/conftest.py @@ -17,7 +17,7 @@ from models_library.products import ProductName from models_library.services import ServiceMetaDataPublished from models_library.users import UserID -from pydantic import Extra, parse_obj_as +from pydantic import ConfigDict, TypeAdapter from pytest_simcore.helpers.faker_factories import ( random_service_access_rights, random_service_meta_data, @@ -121,13 +121,13 @@ async def product( @pytest.fixture def target_product(product: dict[str, Any], product_name: ProductName) -> ProductName: - assert product_name == parse_obj_as(ProductName, product["name"]) + assert product_name == TypeAdapter(ProductName).validate_python(product["name"]) return product_name @pytest.fixture def other_product(product: dict[str, Any]) -> ProductName: - other = parse_obj_as(ProductName, "osparc") + other = TypeAdapter(ProductName).validate_python("osparc") assert other != product["name"] return other @@ -453,9 +453,7 @@ def create_director_list_services_from() -> Callable[ """ class _Loader(ServiceMetaDataPublished): - class Config: - extra = Extra.ignore - allow_population_by_field_name = True + model_config = ConfigDict(extra="ignore", populate_by_name=True) def _( expected_director_list_services: list[dict[str, Any]], From 12dc421cf5c9202e55d02435972f9ddb6c839b5d Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 3 Oct 2024 12:37:14 +0200 Subject: [PATCH 194/280] continue upgrading --- .../api/rest/_services_resources.py | 22 +++++++++++-------- .../models/services_db.py | 4 ++-- 2 files changed, 15 insertions(+), 11 deletions(-) diff --git a/services/catalog/src/simcore_service_catalog/api/rest/_services_resources.py b/services/catalog/src/simcore_service_catalog/api/rest/_services_resources.py index 1d3ef4515a3..9281dded25b 100644 --- a/services/catalog/src/simcore_service_catalog/api/rest/_services_resources.py +++ b/services/catalog/src/simcore_service_catalog/api/rest/_services_resources.py @@ -20,7 +20,7 @@ ServiceResourcesDictHelpers, ) from models_library.utils.docker_compose import replace_env_vars_in_compose_spec -from pydantic import parse_obj_as, parse_raw_as +from pydantic import TypeAdapter from ..._constants import 
RESPONSE_MODEL_POLICY, SIMCORE_SERVICE_SETTINGS_LABELS from ...db.repositories.services import ServicesRepository @@ -61,7 +61,7 @@ def _compute_service_available_boot_modes( if not isinstance(entry.value, dict): _logger.warning( "resource %s for %s got invalid type", - f"{entry.dict()!r}", + f"{entry.model_dump()!r}", f"{service_key}:{service_version}", ) continue @@ -99,7 +99,7 @@ def _resources_from_settings( if not isinstance(entry.value, dict): _logger.warning( "resource %s for %s got invalid type", - f"{entry.dict()!r}", + f"{entry.model_dump()!r}", f"{service_key}:{service_version}", ) continue @@ -156,8 +156,7 @@ async def _get_service_labels( def _get_service_settings( labels: dict[str, Any] ) -> list[SimcoreServiceSettingLabelEntry]: - service_settings = parse_raw_as( - list[SimcoreServiceSettingLabelEntry], + service_settings = TypeAdapter(list[SimcoreServiceSettingLabelEntry]).validate_json( labels.get(SIMCORE_SERVICE_SETTINGS_LABELS, "[]"), ) _logger.debug("received %s", f"{service_settings=}") @@ -181,7 +180,9 @@ async def get_service_resources( ], user_groups: Annotated[list[GroupAtDB], Depends(list_user_groups)], ) -> ServiceResourcesDict: - image_version = parse_obj_as(DockerGenericTag, f"{service_key}:{service_version}") + image_version = TypeAdapter(DockerGenericTag).validate_python( + f"{service_key}:{service_version}" + ) if is_function_service(service_key): return ServiceResourcesDictHelpers.create_from_single_service( image_version, default_service_resources @@ -196,8 +197,9 @@ async def get_service_resources( image_version, default_service_resources ) - service_spec: ComposeSpecLabelDict | None = parse_raw_as( - ComposeSpecLabelDict | None, # type: ignore[arg-type] + service_spec: ComposeSpecLabelDict | None = TypeAdapter( + ComposeSpecLabelDict | None + ).validate_python( service_labels.get(SIMCORE_SERVICE_COMPOSE_SPEC_LABEL, "null"), ) _logger.debug("received %s", f"{service_spec=}") @@ -235,7 +237,9 @@ async def get_service_resources( ) full_service_spec: ComposeSpecLabelDict = yaml.safe_load(stringified_service_spec) - service_to_resources: ServiceResourcesDict = parse_obj_as(ServiceResourcesDict, {}) + service_to_resources: ServiceResourcesDict = TypeAdapter( + ServiceResourcesDict + ).validate_python({}) for spec_key, spec_data in full_service_spec["services"].items(): # image can be: diff --git a/services/catalog/src/simcore_service_catalog/models/services_db.py b/services/catalog/src/simcore_service_catalog/models/services_db.py index b6aa767306a..0abb7883e2e 100644 --- a/services/catalog/src/simcore_service_catalog/models/services_db.py +++ b/services/catalog/src/simcore_service_catalog/models/services_db.py @@ -83,9 +83,9 @@ class ServiceWithHistoryFromDB(BaseModel): assert ( # nosec - set(ReleaseFromDB.__fields__) + set(ReleaseFromDB.model_fields) .difference({"compatibility_policy"}) - .issubset(set(ServiceWithHistoryFromDB.__fields__)) + .issubset(set(ServiceWithHistoryFromDB.model_fields)) ) From 3aa900965db779c1ed826f274afd9118394daa07 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 3 Oct 2024 15:50:31 +0200 Subject: [PATCH 195/280] remove unnecessary cast --- packages/pytest-simcore/src/pytest_simcore/aws_ec2_service.py | 2 +- packages/pytest-simcore/src/pytest_simcore/aws_s3_service.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/pytest-simcore/src/pytest_simcore/aws_ec2_service.py b/packages/pytest-simcore/src/pytest_simcore/aws_ec2_service.py index 3f7bf0e96b3..f971ef9b8f7 100644 --- 
a/packages/pytest-simcore/src/pytest_simcore/aws_ec2_service.py +++ b/packages/pytest-simcore/src/pytest_simcore/aws_ec2_service.py @@ -26,7 +26,7 @@ async def ec2_client( exit_stack = contextlib.AsyncExitStack() session_client = session.client( "ec2", - endpoint_url=str(ec2_settings.EC2_ENDPOINT), + endpoint_url=ec2_settings.EC2_ENDPOINT, aws_access_key_id=ec2_settings.EC2_ACCESS_KEY_ID, aws_secret_access_key=ec2_settings.EC2_SECRET_ACCESS_KEY, region_name=ec2_settings.EC2_REGION_NAME, diff --git a/packages/pytest-simcore/src/pytest_simcore/aws_s3_service.py b/packages/pytest-simcore/src/pytest_simcore/aws_s3_service.py index cf4feca410b..e6afeac8e7b 100644 --- a/packages/pytest-simcore/src/pytest_simcore/aws_s3_service.py +++ b/packages/pytest-simcore/src/pytest_simcore/aws_s3_service.py @@ -25,7 +25,7 @@ async def s3_client(s3_settings: S3Settings) -> typing.AsyncIterator[S3Client]: exit_stack = contextlib.AsyncExitStack() session_client = session.client( "s3", - endpoint_url=str(s3_settings.S3_ENDPOINT), + endpoint_url=s3_settings.S3_ENDPOINT, aws_access_key_id=s3_settings.S3_ACCESS_KEY, aws_secret_access_key=s3_settings.S3_SECRET_KEY, region_name=s3_settings.S3_REGION, From 4c034d2ca6014fbe6de580340cdce3c736d77be1 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 4 Oct 2024 09:55:46 +0200 Subject: [PATCH 196/280] add final --- packages/models-library/src/models_library/rest_pagination.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/models-library/src/models_library/rest_pagination.py b/packages/models-library/src/models_library/rest_pagination.py index ff10ebf4ead..0163cebc322 100644 --- a/packages/models-library/src/models_library/rest_pagination.py +++ b/packages/models-library/src/models_library/rest_pagination.py @@ -15,7 +15,7 @@ from .utils.common_validators import none_to_empty_list_pre_validator -_ANY_HTTP_URL_ADAPTER: TypeAdapter = TypeAdapter(AnyHttpUrl) +_ANY_HTTP_URL_ADAPTER: Final[TypeAdapter[AnyHttpUrl]] = TypeAdapter(AnyHttpUrl) # Default limit values # - Using same values across all pagination entrypoints simplifies From 24679c1291a99a7ba8ed5f0afdaa66d857ddaa50 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 4 Oct 2024 09:56:37 +0200 Subject: [PATCH 197/280] add final --- packages/models-library/src/models_library/rest_pagination.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/models-library/src/models_library/rest_pagination.py b/packages/models-library/src/models_library/rest_pagination.py index ff10ebf4ead..0163cebc322 100644 --- a/packages/models-library/src/models_library/rest_pagination.py +++ b/packages/models-library/src/models_library/rest_pagination.py @@ -15,7 +15,7 @@ from .utils.common_validators import none_to_empty_list_pre_validator -_ANY_HTTP_URL_ADAPTER: TypeAdapter = TypeAdapter(AnyHttpUrl) +_ANY_HTTP_URL_ADAPTER: Final[TypeAdapter[AnyHttpUrl]] = TypeAdapter(AnyHttpUrl) # Default limit values # - Using same values across all pagination entrypoints simplifies From c9f094f203f455089afdcce2e26dc8a61ed7bf99 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 4 Oct 2024 11:26:55 +0200 Subject: [PATCH 198/280] fix ValidationError import --- .../tests/unit/test_node_ports_v2_port_validation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/simcore-sdk/tests/unit/test_node_ports_v2_port_validation.py b/packages/simcore-sdk/tests/unit/test_node_ports_v2_port_validation.py index d636bb9c3f6..ee0d19cec90 100644 --- 
a/packages/simcore-sdk/tests/unit/test_node_ports_v2_port_validation.py +++ b/packages/simcore-sdk/tests/unit/test_node_ports_v2_port_validation.py @@ -14,7 +14,7 @@ import pytest from pydantic import BaseModel, Field, schema_of -from pydantic.error_wrappers import ValidationError +from pydantic import ValidationError from simcore_sdk.node_ports_v2.port import Port from simcore_sdk.node_ports_v2.port_validation import ( PortUnitError, From fee36c01cca59761ff622e36d9a6929148d2b0c3 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 4 Oct 2024 11:32:37 +0200 Subject: [PATCH 199/280] fix mypy --- .../tests/unit/test_node_ports_common_file_io_utils.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/simcore-sdk/tests/unit/test_node_ports_common_file_io_utils.py b/packages/simcore-sdk/tests/unit/test_node_ports_common_file_io_utils.py index a587aade5bb..0540daa58d1 100644 --- a/packages/simcore-sdk/tests/unit/test_node_ports_common_file_io_utils.py +++ b/packages/simcore-sdk/tests/unit/test_node_ports_common_file_io_utils.py @@ -19,7 +19,7 @@ UploadedPart, ) from moto.server import ThreadedMotoServer -from pydantic import AnyUrl, ByteSize, TypeAdapter, parse_obj_as +from pydantic import AnyUrl, ByteSize, TypeAdapter from pytest_mock import MockerFixture from servicelib.aiohttp import status from servicelib.progress_bar import ProgressBarData @@ -212,8 +212,7 @@ async def _creator(num_upload_links: int, chunk_size: ByteSize) -> FileUploadSch assert "UploadId" in response upload_id = response["UploadId"] - upload_links = parse_obj_as( - list[AnyUrl], + upload_links = TypeAdapter(list[AnyUrl]).validate_python( await asyncio.gather( *[ aiobotocore_s3_client.generate_presigned_url( @@ -258,6 +257,7 @@ async def test_upload_file_to_presigned_links( create_file_of_size: Callable[[ByteSize], Path], file_size: ByteSize, used_chunk_size: ByteSize, + faker: Faker, ): """This test is here to reproduce the issue https://github.com/ITISFoundation/osparc-simcore/issues/3531 One theory is that something might be wrong in how the chunking is done and that AWS times out @@ -272,11 +272,11 @@ async def test_upload_file_to_presigned_links( """ local_file = create_file_of_size(file_size) num_links = 2080 - effective_chunk_size = parse_obj_as(ByteSize, local_file.stat().st_size / num_links) + effective_chunk_size = TypeAdapter(ByteSize).validate_python(local_file.stat().st_size / num_links) assert effective_chunk_size <= used_chunk_size upload_links = await create_upload_links(num_links, used_chunk_size) assert len(upload_links.urls) == num_links - async with ProgressBarData(num_steps=1) as progress_bar: + async with ProgressBarData(num_steps=1, description=faker.pystr()) as progress_bar: uploaded_parts: list[UploadedPart] = await upload_file_to_presigned_links( session=client_session, file_upload_links=upload_links, From 79a4641caf63abf1b0ce61b83e58d1bb9ddfbede Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 4 Oct 2024 13:10:00 +0200 Subject: [PATCH 200/280] fix missing callback --- .../tests/unit/test_node_ports_v2_nodeports_v2.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/packages/simcore-sdk/tests/unit/test_node_ports_v2_nodeports_v2.py b/packages/simcore-sdk/tests/unit/test_node_ports_v2_nodeports_v2.py index 91609476b9c..1bc1fcde664 100644 --- a/packages/simcore-sdk/tests/unit/test_node_ports_v2_nodeports_v2.py +++ b/packages/simcore-sdk/tests/unit/test_node_ports_v2_nodeports_v2.py @@ -50,6 +50,7 @@ async def mock_node_port_creator_cb(*args, 
**kwargs): user_id=user_id, project_id=project_id, node_uuid=node_uuid, + io_log_redirect_cb=None, save_to_db_cb=mock_save_db_cb, node_port_creator_cb=mock_node_port_creator_cb, auto_update=False, @@ -62,6 +63,7 @@ async def mock_node_port_creator_cb(*args, **kwargs): user_id=user_id, project_id=project_id, node_uuid=node_uuid, + io_log_redirect_cb=None, save_to_db_cb=mock_save_db_cb, node_port_creator_cb=mock_node_port_creator_cb, auto_update=auto_update, @@ -101,6 +103,7 @@ async def mock_node_port_creator_cb(*args, **kwargs): user_id=user_id, project_id=project_id, node_uuid=node_uuid, + io_log_redirect_cb=None, save_to_db_cb=mock_save_db_cb, node_port_creator_cb=mock_node_port_creator_cb, auto_update=False, @@ -113,6 +116,7 @@ async def mock_node_port_creator_cb(*args, **kwargs): user_id=user_id, project_id=project_id, node_uuid=node_uuid, + io_log_redirect_cb=None, save_to_db_cb=mock_save_db_cb, node_port_creator_cb=mock_node_port_creator_cb, auto_update=False, @@ -182,6 +186,7 @@ async def mock_node_port_creator_cb(*args, **kwargs): user_id=user_id, project_id=project_id, node_uuid=node_uuid, + io_log_redirect_cb=None, save_to_db_cb=mock_save_db_cb, node_port_creator_cb=mock_node_port_creator_cb, auto_update=False, @@ -194,6 +199,7 @@ async def mock_node_port_creator_cb(*args, **kwargs): user_id=user_id, project_id=project_id, node_uuid=node_uuid, + io_log_redirect_cb=None, save_to_db_cb=mock_save_db_cb, node_port_creator_cb=mock_node_port_creator_cb, auto_update=False, From 06c80461637b723751e73a1aa00a5a309577f618 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 4 Oct 2024 14:00:41 +0200 Subject: [PATCH 201/280] fix field name --- .../tests/unit/test_node_ports_v2_port.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/packages/simcore-sdk/tests/unit/test_node_ports_v2_port.py b/packages/simcore-sdk/tests/unit/test_node_ports_v2_port.py index d96e0403551..f8dbe5d0e8d 100644 --- a/packages/simcore-sdk/tests/unit/test_node_ports_v2_port.py +++ b/packages/simcore-sdk/tests/unit/test_node_ports_v2_port.py @@ -341,7 +341,7 @@ class PortParams(NamedTuple): exp_new_value=FileLink( store=simcore_store_id(), path=f"{project_id()}/{node_uuid()}/no_file_with_default/{this_node_file_name().name}", - eTag=e_tag(), + e_tag=e_tag(), ), exp_new_get_value=download_file_folder_name() / "no_file_with_default" @@ -429,7 +429,7 @@ class PortParams(NamedTuple): exp_new_value=FileLink( store=simcore_store_id(), path=f"{project_id()}/{node_uuid()}/some_file_on_datcore/{this_node_file_name().name}", - eTag=e_tag(), + e_tag=e_tag(), ), exp_new_get_value=download_file_folder_name() / "some_file_on_datcore" @@ -458,7 +458,7 @@ class PortParams(NamedTuple): exp_new_value=FileLink( store=simcore_store_id(), path=f"{project_id()}/{node_uuid()}/download_link/{this_node_file_name().name}", - eTag=e_tag(), + e_tag=e_tag(), ), exp_new_get_value=download_file_folder_name() / "download_link" @@ -490,7 +490,7 @@ class PortParams(NamedTuple): exp_new_value=FileLink( store=simcore_store_id(), path=f"{project_id()}/{node_uuid()}/download_link_with_file_to_key/{this_node_file_name().name}", - eTag=e_tag(), + e_tag=e_tag(), ), exp_new_get_value=download_file_folder_name() / "download_link_with_file_to_key" @@ -521,7 +521,7 @@ class PortParams(NamedTuple): exp_new_value=FileLink( store=simcore_store_id(), path=f"{project_id()}/{node_uuid()}/file_port_link/{this_node_file_name().name}", - eTag=e_tag(), + e_tag=e_tag(), ), exp_new_get_value=download_file_folder_name() / "file_port_link" 
@@ -555,7 +555,7 @@ class PortParams(NamedTuple): exp_new_value=FileLink( store=simcore_store_id(), path=f"{project_id()}/{node_uuid()}/file_port_link_with_file_to_key_map/{this_node_file_name().name}", - eTag=e_tag(), + e_tag=e_tag(), ), exp_new_get_value=download_file_folder_name() / "file_port_link_with_file_to_key_map" From 323dbc97b4494e80ab23ee388ce55cdd14266a58 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 4 Oct 2024 15:16:32 +0200 Subject: [PATCH 202/280] code not used by base class --- packages/models-library/src/models_library/errors_classes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/models-library/src/models_library/errors_classes.py b/packages/models-library/src/models_library/errors_classes.py index c6899084b12..48fac42b15c 100644 --- a/packages/models-library/src/models_library/errors_classes.py +++ b/packages/models-library/src/models_library/errors_classes.py @@ -18,7 +18,7 @@ def __new__(cls, *_args, **_kwargs): def __init__(self, **ctx: Any) -> None: self.__dict__ = ctx - super().__init__(message=self._build_message(), code=self.code) + super().__init__(message=self._build_message(), code=None) def __str__(self) -> str: return self._build_message() From 9a8e7183b9785f9bba9a1faebae63522ae089547 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 4 Oct 2024 15:20:33 +0200 Subject: [PATCH 203/280] revert latest commit --- packages/models-library/src/models_library/errors_classes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/models-library/src/models_library/errors_classes.py b/packages/models-library/src/models_library/errors_classes.py index 48fac42b15c..c6899084b12 100644 --- a/packages/models-library/src/models_library/errors_classes.py +++ b/packages/models-library/src/models_library/errors_classes.py @@ -18,7 +18,7 @@ def __new__(cls, *_args, **_kwargs): def __init__(self, **ctx: Any) -> None: self.__dict__ = ctx - super().__init__(message=self._build_message(), code=None) + super().__init__(message=self._build_message(), code=self.code) def __str__(self) -> str: return self._build_message() From 42f07ea00b0f94f91bdc303fc7a89a4fd9a449c1 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 4 Oct 2024 15:36:27 +0200 Subject: [PATCH 204/280] set final --- .../models-library/src/models_library/projects_nodes_io.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/models-library/src/models_library/projects_nodes_io.py b/packages/models-library/src/models_library/projects_nodes_io.py index 6b59d356e8a..412a6927462 100644 --- a/packages/models-library/src/models_library/projects_nodes_io.py +++ b/packages/models-library/src/models_library/projects_nodes_io.py @@ -7,7 +7,7 @@ """ from pathlib import Path -from typing import Annotated, TypeAlias +from typing import Annotated, Final, TypeAlias from uuid import UUID from models_library.basic_types import ConstrainedStr, KeyIDStr @@ -45,7 +45,7 @@ ] -_ANY_URL_ADAPTER: TypeAdapter[AnyUrl] = TypeAdapter(AnyUrl) +_ANY_URL_ADAPTER: Final[TypeAdapter[AnyUrl]] = TypeAdapter(AnyUrl) class SimcoreS3DirectoryID(ConstrainedStr): From 8e756b76190f15c2083e3a19969be780b6ce6ee2 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 4 Oct 2024 15:59:28 +0200 Subject: [PATCH 205/280] continue fixing --- .../src/simcore_service_dynamic_sidecar/core/errors.py | 4 ++-- .../modules/user_services_preferences/_errors.py | 4 ++-- .../modules/user_services_preferences/_packaging.py | 6 ++++-- 3 files changed, 8 insertions(+), 6 
deletions(-) diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/errors.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/errors.py index b0fd128a942..63c6881fd7f 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/errors.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/errors.py @@ -1,8 +1,8 @@ from typing import Any from fastapi import status +from models_library.errors_classes import OsparcErrorMixin from models_library.services import RunID -from pydantic.errors import PydanticErrorMixin class BaseDynamicSidecarError(Exception): @@ -35,7 +35,7 @@ def __init__(self, message: str, status_code: int) -> None: ) -class BaseError(PydanticErrorMixin, BaseDynamicSidecarError): +class BaseError(OsparcErrorMixin, BaseDynamicSidecarError): code = "dy_sidecar.error" diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/user_services_preferences/_errors.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/user_services_preferences/_errors.py index d1b373ac1f5..f226502b24d 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/user_services_preferences/_errors.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/user_services_preferences/_errors.py @@ -1,7 +1,7 @@ -from pydantic.errors import PydanticErrorMixin +from models_library.errors_classes import OsparcErrorMixin -class BaseServicesPreferencesError(PydanticErrorMixin, Exception): +class BaseServicesPreferencesError(OsparcErrorMixin, Exception): code = "dynamic_sidecar.user_service_preferences" diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/user_services_preferences/_packaging.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/user_services_preferences/_packaging.py index 38f2eab7111..bdffd81a4a9 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/user_services_preferences/_packaging.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/user_services_preferences/_packaging.py @@ -2,13 +2,15 @@ from typing import Final import aiofiles -from pydantic import ByteSize, parse_obj_as +from pydantic import ByteSize, TypeAdapter from servicelib.archiving_utils import archive_dir, unarchive_dir from servicelib.file_utils import remove_directory from ._errors import DestinationIsNotADirectoryError, PreferencesAreTooBigError -_MAX_PREFERENCES_TOTAL_SIZE: Final[ByteSize] = parse_obj_as(ByteSize, "128kib") +_MAX_PREFERENCES_TOTAL_SIZE: Final[ByteSize] = TypeAdapter(ByteSize).validate_python( + "128kib" +) async def dir_to_bytes(source: Path) -> bytes: From ccee1d5a83236099859ce30b690cd3a5bff19ee5 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 4 Oct 2024 16:54:57 +0200 Subject: [PATCH 206/280] continue upgrading --- .../services/invitations.py | 10 +++++----- services/invitations/tests/unit/test_cli.py | 8 ++++++-- 2 files changed, 11 insertions(+), 7 deletions(-) diff --git a/services/invitations/src/simcore_service_invitations/services/invitations.py b/services/invitations/src/simcore_service_invitations/services/invitations.py index 582014925de..33f600dc8e2 100644 --- a/services/invitations/src/simcore_service_invitations/services/invitations.py +++ b/services/invitations/src/simcore_service_invitations/services/invitations.py @@ -26,8 +26,8 @@ class _ContentWithShortNames(InvitationContent): @classmethod def serialize(cls, model_obj: 
InvitationContent) -> str: """Exports to json using *short* aliases and values in order to produce shorter codes""" - model_w_short_aliases_json: str = cls.construct( - **model_obj.dict(exclude_unset=True) + model_w_short_aliases_json: str = cls.model_construct( + **model_obj.model_dump(exclude_unset=True) ).model_dump_json(exclude_unset=True, by_alias=True) # NOTE: json arguments try to minimize the amount of data # serialized. The CONS is that it relies on models in the code @@ -38,9 +38,9 @@ def serialize(cls, model_obj: InvitationContent) -> str: @classmethod def deserialize(cls, raw_json: str) -> InvitationContent: """Parses a json string and returns InvitationContent model""" - model_w_short_aliases = cls.parse_raw(raw_json) - return InvitationContent.construct( - **model_w_short_aliases.dict(exclude_unset=True) + model_w_short_aliases = cls.model_validate_json(raw_json) + return InvitationContent.model_construct( + **model_w_short_aliases.model_dump(exclude_unset=True) ) model_config = ConfigDict( diff --git a/services/invitations/tests/unit/test_cli.py b/services/invitations/tests/unit/test_cli.py index 6a631e660e4..c83623a2f8b 100644 --- a/services/invitations/tests/unit/test_cli.py +++ b/services/invitations/tests/unit/test_cli.py @@ -8,6 +8,7 @@ import pytest from faker import Faker from models_library.products import ProductName +from pydantic import TypeAdapter from pytest_simcore.helpers.monkeypatch_envs import load_dotenv, setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict from simcore_service_invitations._meta import API_VERSION @@ -71,7 +72,10 @@ def test_invite_user_and_check_invitation( env=environs, ) assert result.exit_code == os.EX_OK, result.output - assert expected == InvitationInputs.parse_raw(result.stdout).dict() + assert ( + expected + == TypeAdapter(InvitationInputs).validate_json(result.stdout).model_dump() + ) def test_echo_dotenv(cli_runner: CliRunner, monkeypatch: pytest.MonkeyPatch): @@ -93,5 +97,5 @@ def test_list_settings(cli_runner: CliRunner, app_environment: EnvVarsDict): assert result.exit_code == os.EX_OK, result.output print(result.output) - settings = ApplicationSettings.parse_raw(result.output) + settings = TypeAdapter(ApplicationSettings).validate_json(result.output) assert settings == ApplicationSettings.create_from_envs() From a839523430f7c673e763adb64d7a84171e4aaea0 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 4 Oct 2024 16:59:59 +0200 Subject: [PATCH 207/280] fix leading / in url --- .../src/simcore_service_invitations/services/invitations.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/invitations/src/simcore_service_invitations/services/invitations.py b/services/invitations/src/simcore_service_invitations/services/invitations.py index 33f600dc8e2..fcdc3d0ce1d 100644 --- a/services/invitations/src/simcore_service_invitations/services/invitations.py +++ b/services/invitations/src/simcore_service_invitations/services/invitations.py @@ -64,7 +64,7 @@ def _build_link( r = URL("/registration").include_query_params(invitation=code_url_safe) # Adds query to fragment - base_url = f"{base_url.rstrip('/')}/" + base_url = f"{base_url}" url = URL(base_url).replace(fragment=f"{r}") return TypeAdapter(HttpUrl).validate_python(f"{url}") From 173256473ecdef9f6faf384a7e9c071955464781 Mon Sep 17 00:00:00 2001 From: Andrei Neagu Date: Sat, 5 Oct 2024 11:03:27 +0200 Subject: [PATCH 208/280] fixed tests in dynamic-scheduler --- .../dynamic_services_service.py | 4 +- 
.../api_schemas_webserver/projects_nodes.py | 2 +- .../src/models_library/utils/serialization.py | 4 ++ .../dynamic_scheduler/errors.py | 4 +- .../src/settings_library/base.py | 2 +- .../simcore_service_dynamic_scheduler/cli.py | 16 +------- .../core/settings.py | 19 +++++++++- .../dynamic-scheduler/tests/unit/test_cli.py | 38 ++++++++++++++++--- 8 files changed, 62 insertions(+), 27 deletions(-) diff --git a/packages/models-library/src/models_library/api_schemas_directorv2/dynamic_services_service.py b/packages/models-library/src/models_library/api_schemas_directorv2/dynamic_services_service.py index f0958695e15..d103a3ea8c5 100644 --- a/packages/models-library/src/models_library/api_schemas_directorv2/dynamic_services_service.py +++ b/packages/models-library/src/models_library/api_schemas_directorv2/dynamic_services_service.py @@ -33,7 +33,7 @@ class CommonServiceDetails(BaseModel): class ServiceDetails(CommonServiceDetails): - basepath: Path = Field( + basepath: Path | None = Field( default=None, description="predefined path where the dynamic service should be served. If empty, the service shall use the root endpoint.", alias="service_basepath", @@ -68,7 +68,7 @@ class RunningDynamicServiceDetails(ServiceDetails): internal_port: PortInt = Field( ..., description="the service swarm internal port", alias="service_port" ) - published_port: PortInt = Field( + published_port: PortInt | None = Field( default=None, description="the service swarm published port if any", deprecated=True, diff --git a/packages/models-library/src/models_library/api_schemas_webserver/projects_nodes.py b/packages/models-library/src/models_library/api_schemas_webserver/projects_nodes.py index cda166e0d13..02fabd46f7a 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/projects_nodes.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/projects_nodes.py @@ -103,7 +103,7 @@ class NodeGet(OutputSchema): "service_basepath": "/x/E1O2E-LAH", "service_state": "pending", "service_message": "no suitable node (insufficient resources on 1 node)", - "user_id": 123, + "user_id": "123", } } ) diff --git a/packages/models-library/src/models_library/utils/serialization.py b/packages/models-library/src/models_library/utils/serialization.py index aae386a88a9..3589fdceeb2 100644 --- a/packages/models-library/src/models_library/utils/serialization.py +++ b/packages/models-library/src/models_library/utils/serialization.py @@ -1,3 +1,4 @@ +from datetime import timedelta from typing import Any from models_library.utils.pydantic_fields_extension import get_type @@ -15,6 +16,9 @@ def model_dump_with_secrets( field_data = data[field_name] + if isinstance(field_data, timedelta): + data[field_name] = field_data.total_seconds() + if isinstance(field_data, SecretStr): if show_secrets: data[field_name] = field_data.get_secret_value() diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_scheduler/errors.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_scheduler/errors.py index 5e104db333c..045ba8638a8 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_scheduler/errors.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_scheduler/errors.py @@ -1,7 +1,7 @@ -from pydantic.errors import PydanticErrorMixin +from models_library.errors_classes import OsparcErrorMixin -class BaseDynamicSchedulerRPCError(PydanticErrorMixin, Exception): +class BaseDynamicSchedulerRPCError(OsparcErrorMixin, 
Exception): ... diff --git a/packages/settings-library/src/settings_library/base.py b/packages/settings-library/src/settings_library/base.py index 97b50bd6e58..2615a74f35e 100644 --- a/packages/settings-library/src/settings_library/base.py +++ b/packages/settings-library/src/settings_library/base.py @@ -44,7 +44,7 @@ def _default_factory(): field_name, ) return None - + _logger.warning("Validation errors=%s", err.errors()) raise DefaultFromEnvFactoryError(errors=err.errors()) from err return _default_factory diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/cli.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/cli.py index e06b8f25129..79e91df7e30 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/cli.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/cli.py @@ -1,8 +1,6 @@ import logging -import os import typer -from settings_library.rabbit import RabbitSettings from settings_library.utils_cli import ( create_settings_command, create_version_callback, @@ -40,19 +38,7 @@ def echo_dotenv(ctx: typer.Context, *, minimal: bool = True): # Nonetheless, if the caller of this CLI has already some **valid** env vars in the environment we want to use them ... # and that is why we use `os.environ`. - settings = ApplicationSettings.create_from_envs( - DYNAMIC_SCHEDULER_RABBITMQ=os.environ.get( - "DYNAMIC_SCHEDULER_RABBITMQ", - RabbitSettings.create_from_envs( - RABBIT_HOST=os.environ.get("RABBIT_HOST", "replace-with-rabbit-host"), - RABBIT_SECURE=os.environ.get("RABBIT_SECURE", "0"), - RABBIT_USER=os.environ.get("RABBIT_USER", "replace-with-rabbit-user"), - RABBIT_PASSWORD=os.environ.get( - "RABBIT_PASSWORD", "replace-with-rabbit-user" - ), - ), - ), - ) + settings = ApplicationSettings.create_from_envs() print_as_envfile( settings, diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/settings.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/settings.py index 1a38cf336cb..df1f8996c52 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/settings.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/settings.py @@ -1,7 +1,7 @@ import datetime from functools import cached_property -from pydantic import Field, parse_obj_as, validator +from pydantic import Field, field_validator, parse_obj_as, validator from settings_library.application import BaseApplicationSettings from settings_library.basic_types import LogLevel, VersionTag from settings_library.director_v2 import DirectorV2Settings @@ -43,6 +43,23 @@ class _BaseApplicationSettings(BaseApplicationSettings, MixinLoggingSettings): ), ) + # TODO: this should be a common validator put in some common library and not here to allow reuse + # wherever we used timedelta this should be in place otherwise it will fail where we overwrite the + # values via env vars + # GCR we need to talk where to place this one + @field_validator("DYNAMIC_SCHEDULER_STOP_SERVICE_TIMEOUT", mode="before") + @classmethod + def interpret_t_as_seconds( + cls, v: datetime.timedelta | str | float + ) -> datetime.timedelta | float | str: + if isinstance(v, str): + try: + return float(v) + except ValueError: + # returns format like "1:00:00" + return v + return v + @cached_property def LOG_LEVEL(self): # noqa: N802 return self.DYNAMIC_SCHEDULER__LOGLEVEL diff --git a/services/dynamic-scheduler/tests/unit/test_cli.py b/services/dynamic-scheduler/tests/unit/test_cli.py index 
2e812f7e118..17087043e3d 100644 --- a/services/dynamic-scheduler/tests/unit/test_cli.py +++ b/services/dynamic-scheduler/tests/unit/test_cli.py @@ -1,8 +1,11 @@ +# pylint:disable=redefined-outer-name # pylint:disable=unused-argument import os +import traceback import pytest +from click.testing import Result from pytest_simcore.helpers.monkeypatch_envs import load_dotenv, setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict from simcore_service_dynamic_scheduler._meta import API_VERSION @@ -11,20 +14,45 @@ from typer.testing import CliRunner +@pytest.fixture +def app_environment( + monkeypatch: pytest.MonkeyPatch, + docker_compose_service_dynamic_scheduler_env_vars: EnvVarsDict, +) -> EnvVarsDict: + return setenvs_from_dict( + monkeypatch, + { + **docker_compose_service_dynamic_scheduler_env_vars, + "RABBIT_HOST": "rabbit-host", + "RABBIT_SECURE": "0", + "RABBIT_USER": "rabbit-user", + "RABBIT_PASSWORD": "rabbit-password", + }, + ) + + +def _format_cli_error(result: Result) -> str: + assert result.exception + tb_message = "\n".join(traceback.format_tb(result.exception.__traceback__)) + return f"Below exception was raised by the cli:\n{tb_message}\n{result.stdout}" + + def test_cli_help_and_version(cli_runner: CliRunner): # simcore-service-dynamic-scheduler --help result = cli_runner.invoke(cli_main, "--help") - assert result.exit_code == os.EX_OK, result.output + assert result.exit_code == os.EX_OK, _format_cli_error(result) result = cli_runner.invoke(cli_main, "--version") - assert result.exit_code == os.EX_OK, result.output + assert result.exit_code == os.EX_OK, _format_cli_error(result) assert result.stdout.strip() == API_VERSION -def test_echo_dotenv(cli_runner: CliRunner, monkeypatch: pytest.MonkeyPatch): +def test_echo_dotenv( + app_environment: EnvVarsDict, cli_runner: CliRunner, monkeypatch: pytest.MonkeyPatch +): # simcore-service-dynamic-scheduler echo-dotenv result = cli_runner.invoke(cli_main, "echo-dotenv") - assert result.exit_code == os.EX_OK, result.output + assert result.exit_code == os.EX_OK, _format_cli_error(result) environs = load_dotenv(result.stdout) @@ -36,7 +64,7 @@ def test_echo_dotenv(cli_runner: CliRunner, monkeypatch: pytest.MonkeyPatch): def test_list_settings(cli_runner: CliRunner, app_environment: EnvVarsDict): # simcore-service-dynamic-scheduler settings --show-secrets --as-json result = cli_runner.invoke(cli_main, ["settings", "--show-secrets", "--as-json"]) - assert result.exit_code == os.EX_OK, result.output + assert result.exit_code == os.EX_OK, _format_cli_error(result) print(result.output) settings = ApplicationSettings.parse_raw(result.output) From 968b5ab764b80c80decb0cc38b45cbd4e6ad7253 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 7 Oct 2024 09:28:13 +0200 Subject: [PATCH 209/280] Add common library --- packages/common-library/.gitignore | 3 ++ packages/common-library/Makefile | 49 ++++++++++++++++++++++++ packages/common-library/README.md | 40 ++++++++++++++++++++ packages/common-library/VERSION | 1 + packages/common-library/setup.cfg | 24 ++++++++++++ packages/common-library/setup.py | 60 ++++++++++++++++++++++++++++++ 6 files changed, 177 insertions(+) create mode 100644 packages/common-library/.gitignore create mode 100644 packages/common-library/Makefile create mode 100644 packages/common-library/README.md create mode 100644 packages/common-library/VERSION create mode 100644 packages/common-library/setup.cfg create mode 100644 packages/common-library/setup.py diff --git a/packages/common-library/.gitignore 
b/packages/common-library/.gitignore new file mode 100644 index 00000000000..98149591573 --- /dev/null +++ b/packages/common-library/.gitignore @@ -0,0 +1,3 @@ + +# erdantic outputs +erd-*.svg diff --git a/packages/common-library/Makefile b/packages/common-library/Makefile new file mode 100644 index 00000000000..b554ec6f9c0 --- /dev/null +++ b/packages/common-library/Makefile @@ -0,0 +1,49 @@ +# +# Targets for DEVELOPMENT of common Library +# +include ../../scripts/common.Makefile +include ../../scripts/common-package.Makefile + +.PHONY: requirements +requirements: ## compiles pip requirements (.in -> .txt) + @$(MAKE_C) requirements reqs + + +.PHONY: install-dev install-prod install-ci +install-dev install-prod install-ci: _check_venv_active ## install app in development/production or CI mode + # installing in $(subst install-,,$@) mode + @uv pip sync requirements/$(subst install-,,$@).txt + + +.PHONY: tests tests-ci +tests: ## runs unit tests + # running unit tests + @pytest \ + --asyncio-mode=auto \ + --color=yes \ + --cov-config=../../.coveragerc \ + --cov-report=term-missing \ + --cov=common_library \ + --durations=10 \ + --exitfirst \ + --failed-first \ + --pdb \ + -vv \ + $(CURDIR)/tests + +tests-ci: ## runs unit tests [ci-mode] + # running unit tests + @pytest \ + --asyncio-mode=auto \ + --color=yes \ + --cov-append \ + --cov-config=../../.coveragerc \ + --cov-report=term-missing \ + --cov-report=xml \ + --cov=common_library \ + --durations=10 \ + --log-date-format="%Y-%m-%d %H:%M:%S" \ + --log-format="%(asctime)s %(levelname)s %(message)s" \ + --verbose \ + -m "not heavy_load" \ + $(CURDIR)/tests diff --git a/packages/common-library/README.md b/packages/common-library/README.md new file mode 100644 index 00000000000..ee3957a4031 --- /dev/null +++ b/packages/common-library/README.md @@ -0,0 +1,40 @@ +# simcore pydantic common library + +## Installation + +```console +make help +make install-dev +``` + +## Test + +```console +make help +make test-dev +``` + + +## Diagnostics + +How run diagnostics on the service metadata published in a docker registry? + +1. Setup environment +```bash +make devenv +source .venv/bin/activate + +cd packages/common-library +make install-dev +``` +2. Set ``REGISTRY_*`` env vars in ``.env`` (in the repository base folder) +3. Download test data, run diagnostics, archive tests-data, and cleanup +```bash +export DEPLOY_NAME=my-deploy + +make pull_test_data >$DEPLOY_NAME-registry-diagnostics.log 2>&1 +pytest -vv -m diagnostics >>$DEPLOY_NAME-registry-diagnostics.log 2>&1 +zip -r $DEPLOY_NAME-registry-test-data.zip tests/data/.downloaded-ignore +rm -r tests/data/.downloaded-ignore +``` +4. Move all ``$DEPLOY_NAME-*`` files to an archive diff --git a/packages/common-library/VERSION b/packages/common-library/VERSION new file mode 100644 index 00000000000..6e8bf73aa55 --- /dev/null +++ b/packages/common-library/VERSION @@ -0,0 +1 @@ +0.1.0 diff --git a/packages/common-library/setup.cfg b/packages/common-library/setup.cfg new file mode 100644 index 00000000000..07196483569 --- /dev/null +++ b/packages/common-library/setup.cfg @@ -0,0 +1,24 @@ +[bumpversion] +current_version = 0.1.0 +commit = True +message = packages/common-library version: {current_version} → {new_version} +tag = False +commit_args = --no-verify + +[bumpversion:file:VERSION] + +[bdist_wheel] +universal = 1 + +[aliases] +test = pytest + +[tool:pytest] +asyncio_mode = auto +markers = + diagnostics: "can be used to run diagnostics against deployed data (e.g. 
database, registry etc)" + testit: "marks test to run during development" + +[mypy] +plugins = + pydantic.mypy diff --git a/packages/common-library/setup.py b/packages/common-library/setup.py new file mode 100644 index 00000000000..24dded82a04 --- /dev/null +++ b/packages/common-library/setup.py @@ -0,0 +1,60 @@ +import re +import sys +from pathlib import Path + +from setuptools import find_packages, setup + + +def read_reqs(reqs_path: Path) -> set[str]: + return { + r + for r in re.findall( + r"(^[^#\n-][\w\[,\]]+[-~>=<.\w]*)", + reqs_path.read_text(), + re.MULTILINE, + ) + if isinstance(r, str) + } + + +CURRENT_DIR = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent + + +INSTALL_REQUIREMENTS = tuple( + read_reqs(CURRENT_DIR / "requirements" / "_base.in") +) # WEAK requirements + +TEST_REQUIREMENTS = tuple( + read_reqs(CURRENT_DIR / "requirements" / "_test.txt") +) # STRICK requirements + + +SETUP = { + "name": "simcore-common-library", + "version": Path(CURRENT_DIR / "VERSION").read_text().strip(), + "author": "Sylvain Anderegg (sanderegg)", + "description": "Core service library for simcore pydantic common", + "python_requires": "~=3.10", + "classifiers": [ + "Development Status :: 2 - Pre-Alpha", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Natural Language :: English", + "Programming Language :: Python :: 3.10", + ], + "long_description": Path(CURRENT_DIR / "README.md").read_text(), + "license": "MIT license", + "install_requires": INSTALL_REQUIREMENTS, + "packages": find_packages(where="src"), + "package_data": {"": ["py.typed"]}, + "package_dir": {"": "src"}, + "include_package_data": True, + "test_suite": "tests", + "tests_require": TEST_REQUIREMENTS, + "extras_require": {"test": TEST_REQUIREMENTS}, + "zip_safe": False, +} + + +if __name__ == "__main__": + setup(**SETUP) From 67304b6e2287aa2ad36aeccfd179ab3476d35a9d Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 7 Oct 2024 10:41:55 +0200 Subject: [PATCH 210/280] add code field --- packages/models-library/src/models_library/errors_classes.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/packages/models-library/src/models_library/errors_classes.py b/packages/models-library/src/models_library/errors_classes.py index c6899084b12..08f32d39f8f 100644 --- a/packages/models-library/src/models_library/errors_classes.py +++ b/packages/models-library/src/models_library/errors_classes.py @@ -10,15 +10,16 @@ def __missing__(self, key): class OsparcErrorMixin(PydanticErrorMixin): msg_template: str + code: str # type: ignore[assignment] def __new__(cls, *_args, **_kwargs): if not hasattr(cls, "code"): - cls.code = cls._get_full_class_name() # type: ignore[assignment] + cls.code = cls._get_full_class_name() return super().__new__(cls) def __init__(self, **ctx: Any) -> None: self.__dict__ = ctx - super().__init__(message=self._build_message(), code=self.code) + super().__init__(message=self._build_message(), code=self.code) # type: ignore[arg-type] def __str__(self) -> str: return self._build_message() From 36cda93bc3718c852a241c24562bfd55086cff47 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 7 Oct 2024 11:17:40 +0200 Subject: [PATCH 211/280] continue upgrading --- .../src/aws_library/ec2/__init__.py | 8 +++++ .../src/aws_library/ec2/_models.py | 10 ++++-- .../utils/buffer_machines_pool_core.py | 32 ++++++++++++------- 3 files changed, 36 insertions(+), 14 deletions(-) diff --git a/packages/aws-library/src/aws_library/ec2/__init__.py 
b/packages/aws-library/src/aws_library/ec2/__init__.py index 02fcf10b00e..112c70861b2 100644 --- a/packages/aws-library/src/aws_library/ec2/__init__.py +++ b/packages/aws-library/src/aws_library/ec2/__init__.py @@ -1,6 +1,10 @@ from ._client import SimcoreEC2API from ._errors import EC2AccessError, EC2NotConnectedError, EC2RuntimeError from ._models import ( + AWS_TAG_KEY_MAX_LENGTH, + AWS_TAG_KEY_MIN_LENGTH, + AWS_TAG_VALUE_MAX_LENGTH, + AWS_TAG_VALUE_MIN_LENGTH, AWSTagKey, AWSTagValue, EC2InstanceBootSpecific, @@ -14,6 +18,10 @@ __all__: tuple[str, ...] = ( "AWSTagKey", "AWSTagValue", + "AWS_TAG_KEY_MIN_LENGTH", + "AWS_TAG_KEY_MAX_LENGTH", + "AWS_TAG_VALUE_MIN_LENGTH", + "AWS_TAG_VALUE_MAX_LENGTH", "EC2AccessError", "EC2InstanceBootSpecific", "EC2InstanceConfig", diff --git a/packages/aws-library/src/aws_library/ec2/_models.py b/packages/aws-library/src/aws_library/ec2/_models.py index d1ff9cb3cc9..ad15a74f0eb 100644 --- a/packages/aws-library/src/aws_library/ec2/_models.py +++ b/packages/aws-library/src/aws_library/ec2/_models.py @@ -2,7 +2,7 @@ import re import tempfile from dataclasses import dataclass -from typing import Annotated, TypeAlias +from typing import Annotated, Final, TypeAlias import sh # type: ignore[import-untyped] from models_library.docker import DockerGenericTag @@ -68,17 +68,21 @@ class EC2InstanceType: InstancePrivateDNSName: TypeAlias = str +AWS_TAG_KEY_MIN_LENGTH: Final[int] = 1 +AWS_TAG_KEY_MAX_LENGTH: Final[int] = 128 AWSTagKey: TypeAlias = Annotated[ # see [https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/Using_Tags.html#tag-restrictions] str, StringConstraints( - min_length=1, - max_length=128, + min_length=AWS_TAG_KEY_MIN_LENGTH, + max_length=AWS_TAG_KEY_MAX_LENGTH, pattern=re.compile(r"^(?!(_index|\.{1,2})$)[a-zA-Z0-9\+\-=\._:@]+$"), ), ] +AWS_TAG_VALUE_MIN_LENGTH: Final[int] = 0 +AWS_TAG_VALUE_MAX_LENGTH: Final[int] = 256 AWSTagValue: TypeAlias = Annotated[ # see [https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/Using_Tags.html#tag-restrictions] # quotes []{} were added as it allows to json encode. 
it seems to be accepted as a value diff --git a/services/autoscaling/src/simcore_service_autoscaling/utils/buffer_machines_pool_core.py b/services/autoscaling/src/simcore_service_autoscaling/utils/buffer_machines_pool_core.py index 133708001ae..c25f72ee546 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/utils/buffer_machines_pool_core.py +++ b/services/autoscaling/src/simcore_service_autoscaling/utils/buffer_machines_pool_core.py @@ -1,11 +1,11 @@ from collections.abc import Iterable from operator import itemgetter -from aws_library.ec2 import AWSTagKey, AWSTagValue, EC2Tags +from aws_library.ec2 import AWS_TAG_VALUE_MAX_LENGTH, AWSTagKey, AWSTagValue, EC2Tags from fastapi import FastAPI from models_library.docker import DockerGenericTag from models_library.utils.json_serialization import json_dumps -from pydantic import parse_obj_as, parse_raw_as +from pydantic import TypeAdapter from ..constants import ( ACTIVATED_BUFFER_MACHINE_EC2_TAGS, @@ -29,8 +29,10 @@ def get_deactivated_buffer_ec2_tags( base_ec2_tags = ( auto_scaling_mode.get_ec2_tags(app) | DEACTIVATED_BUFFER_MACHINE_EC2_TAGS ) - base_ec2_tags[AWSTagKey("Name")] = AWSTagValue( - f"{base_ec2_tags[AWSTagKey('Name')]}-buffer" + base_ec2_tags[TypeAdapter(AWSTagKey).validate_python("Name")] = TypeAdapter( + AWSTagValue + ).validate_python( + f"{base_ec2_tags[TypeAdapter(AWSTagKey).validate_python('Name')]}-buffer" ) return base_ec2_tags @@ -43,20 +45,26 @@ def dump_pre_pulled_images_as_tags(images: Iterable[DockerGenericTag]) -> EC2Tag # AWS Tag Values are limited to 256 characaters so we chunk the images # into smaller chunks jsonized_images = json_dumps(images) - assert AWSTagValue.max_length # nosec - if len(jsonized_images) > AWSTagValue.max_length: + assert AWS_TAG_VALUE_MAX_LENGTH # nosec + if len(jsonized_images) > AWS_TAG_VALUE_MAX_LENGTH: # let's chunk the string - chunk_size = AWSTagValue.max_length + chunk_size = AWS_TAG_VALUE_MAX_LENGTH chunks = [ jsonized_images[i : i + chunk_size] for i in range(0, len(jsonized_images), chunk_size) ] return { - AWSTagKey(f"{PRE_PULLED_IMAGES_EC2_TAG_KEY}_({i})"): AWSTagValue(c) + TypeAdapter(AWSTagKey) + .validate_python(f"{PRE_PULLED_IMAGES_EC2_TAG_KEY}_({i})"): TypeAdapter( + AWSTagValue + ) + .validate_python(c) for i, c in enumerate(chunks) } return { - PRE_PULLED_IMAGES_EC2_TAG_KEY: parse_obj_as(AWSTagValue, json_dumps(images)) + PRE_PULLED_IMAGES_EC2_TAG_KEY: TypeAdapter(AWSTagValue).validate_python( + json_dumps(images) + ) } @@ -64,7 +72,9 @@ def load_pre_pulled_images_from_tags(tags: EC2Tags) -> list[DockerGenericTag]: # AWS Tag values are limited to 256 characters so we chunk the images if PRE_PULLED_IMAGES_EC2_TAG_KEY in tags: # read directly - return parse_raw_as(list[DockerGenericTag], tags[PRE_PULLED_IMAGES_EC2_TAG_KEY]) + return TypeAdapter(list[DockerGenericTag]).validate_json( + tags[PRE_PULLED_IMAGES_EC2_TAG_KEY] + ) assembled_json = "".join( map( @@ -80,5 +90,5 @@ def load_pre_pulled_images_from_tags(tags: EC2Tags) -> list[DockerGenericTag]: ) ) if assembled_json: - return parse_raw_as(list[DockerGenericTag], assembled_json) + return TypeAdapter(list[DockerGenericTag]).validate_json(assembled_json) return [] From 1a73ba08057daa31ec8df331768a9dc48b3443ef Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 7 Oct 2024 11:42:25 +0200 Subject: [PATCH 212/280] continue upgrading --- .../src/simcore_service_autoscaling/_meta.py | 4 +++- .../simcore_service_autoscaling/constants.py | 24 ++++++++++--------- .../core/errors.py | 5 ---- 
.../core/settings.py | 4 ++-- .../tests/unit/test_utils_rabbitmq.py | 18 ++++++-------- 5 files changed, 25 insertions(+), 30 deletions(-) diff --git a/services/autoscaling/src/simcore_service_autoscaling/_meta.py b/services/autoscaling/src/simcore_service_autoscaling/_meta.py index 22d3ea19043..a50c6f7ac93 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/_meta.py +++ b/services/autoscaling/src/simcore_service_autoscaling/_meta.py @@ -1,5 +1,7 @@ from typing import Final +from pydantic import TypeAdapter + from models_library.basic_types import VersionStr, VersionTag from packaging.version import Version from servicelib.utils_meta import PackageInfo @@ -10,7 +12,7 @@ APP_NAME: Final[str] = info.project_name API_VERSION: Final[VersionStr] = info.__version__ VERSION: Final[Version] = info.version -API_VTAG: Final[VersionTag] = VersionTag(info.api_prefix_path_tag) +API_VTAG: Final[VersionTag] = TypeAdapter(VersionTag).validate_python(info.api_prefix_path_tag) SUMMARY: Final[str] = info.get_summary() diff --git a/services/autoscaling/src/simcore_service_autoscaling/constants.py b/services/autoscaling/src/simcore_service_autoscaling/constants.py index 4415d3eca2c..5ce55ee991c 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/constants.py +++ b/services/autoscaling/src/simcore_service_autoscaling/constants.py @@ -2,13 +2,13 @@ from typing import Final from aws_library.ec2._models import AWSTagKey, AWSTagValue, EC2Tags -from pydantic import parse_obj_as +from pydantic import TypeAdapter -BUFFER_MACHINE_PULLING_EC2_TAG_KEY: Final[AWSTagKey] = parse_obj_as( - AWSTagKey, "pulling" +BUFFER_MACHINE_PULLING_EC2_TAG_KEY: Final[AWSTagKey] = TypeAdapter(AWSTagKey).validate_python( + "pulling" ) -BUFFER_MACHINE_PULLING_COMMAND_ID_EC2_TAG_KEY: Final[AWSTagKey] = parse_obj_as( - AWSTagKey, "ssm-command-id" +BUFFER_MACHINE_PULLING_COMMAND_ID_EC2_TAG_KEY: Final[AWSTagKey] = TypeAdapter(AWSTagKey).validate_python( + "ssm-command-id" ) PREPULL_COMMAND_NAME: Final[str] = "docker images pulling" @@ -16,18 +16,20 @@ str ] = "docker compose -f /docker-pull.compose.yml -p buffering pull" -PRE_PULLED_IMAGES_EC2_TAG_KEY: Final[AWSTagKey] = parse_obj_as( - AWSTagKey, "io.simcore.autoscaling.pre_pulled_images" +PRE_PULLED_IMAGES_EC2_TAG_KEY: Final[AWSTagKey] = TypeAdapter(AWSTagKey).validate_python( + "io.simcore.autoscaling.pre_pulled_images" ) -BUFFER_MACHINE_TAG_KEY: Final[AWSTagKey] = parse_obj_as( - AWSTagKey, "io.simcore.autoscaling.buffer_machine" +BUFFER_MACHINE_TAG_KEY: Final[AWSTagKey] = TypeAdapter(AWSTagKey).validate_python( + "io.simcore.autoscaling.buffer_machine" ) DEACTIVATED_BUFFER_MACHINE_EC2_TAGS: Final[EC2Tags] = { - BUFFER_MACHINE_TAG_KEY: parse_obj_as(AWSTagValue, "true") + BUFFER_MACHINE_TAG_KEY: TypeAdapter(AWSTagValue).validate_python( + "true" + ) } ACTIVATED_BUFFER_MACHINE_EC2_TAGS: Final[EC2Tags] = { - BUFFER_MACHINE_TAG_KEY: parse_obj_as(AWSTagValue, "false") + BUFFER_MACHINE_TAG_KEY: TypeAdapter(AWSTagValue).validate_python("false") } PRE_PULLED_IMAGES_RE: Final[re.Pattern] = re.compile( rf"{PRE_PULLED_IMAGES_EC2_TAG_KEY}_\((\d+)\)" diff --git a/services/autoscaling/src/simcore_service_autoscaling/core/errors.py b/services/autoscaling/src/simcore_service_autoscaling/core/errors.py index 398b1278806..1581c54d85e 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/core/errors.py +++ b/services/autoscaling/src/simcore_service_autoscaling/core/errors.py @@ -1,12 +1,7 @@ -from typing import Any - from models_library.errors_classes import 
OsparcErrorMixin class AutoscalingRuntimeError(OsparcErrorMixin, RuntimeError): - def __init__(self, **ctx: Any) -> None: - super().__init__(**ctx) - msg_template: str = "Autoscaling unexpected error" diff --git a/services/autoscaling/src/simcore_service_autoscaling/core/settings.py b/services/autoscaling/src/simcore_service_autoscaling/core/settings.py index bae5f6dc85e..305b2bd9cc2 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/core/settings.py +++ b/services/autoscaling/src/simcore_service_autoscaling/core/settings.py @@ -1,6 +1,6 @@ import datetime from functools import cached_property -from typing import Final, cast +from typing import Annotated, Final, cast from aws_library.ec2 import EC2InstanceBootSpecific, EC2Tags from fastapi import FastAPI @@ -183,7 +183,7 @@ class NodesMonitoringSettings(BaseCustomSettings): class DaskMonitoringSettings(BaseCustomSettings): - DASK_MONITORING_URL: AnyUrl = Field( + DASK_MONITORING_URL: Annotated[str, AnyUrl] = Field( ..., description="the url to the osparc-dask-scheduler" ) DASK_SCHEDULER_AUTH: InternalClusterAuthentication = Field( diff --git a/services/autoscaling/tests/unit/test_utils_rabbitmq.py b/services/autoscaling/tests/unit/test_utils_rabbitmq.py index 6b6308399d0..7dec11bb556 100644 --- a/services/autoscaling/tests/unit/test_utils_rabbitmq.py +++ b/services/autoscaling/tests/unit/test_utils_rabbitmq.py @@ -18,7 +18,7 @@ ProgressRabbitMessageNode, ProgressType, ) -from pydantic import parse_obj_as +from pydantic import TypeAdapter from pytest_mock.plugin import MockerFixture from servicelib.rabbitmq import BIND_TO_ALL_TOPICS, RabbitMQClient from settings_library.rabbit import RabbitSettings @@ -78,8 +78,7 @@ async def test_post_task_log_message( "running", ) assert service_with_labels.Spec - service_tasks = parse_obj_as( - list[Task], + service_tasks = TypeAdapter(list[Task]).validate_python( await async_docker_client.tasks.list( filters={"service": service_with_labels.Spec.Name} ), @@ -103,7 +102,7 @@ async def test_post_task_log_message( messages=[f"[cluster] {log_message}"], log_level=0, ) - .json() + .model_dump_json() .encode() ) print("... message received") @@ -125,8 +124,7 @@ async def test_post_task_log_message_does_not_raise_if_service_has_no_labels( ): service_without_labels = await create_service(task_template, {}, "running") assert service_without_labels.Spec - service_tasks = parse_obj_as( - list[Task], + service_tasks = TypeAdapter(list[Task]).validate_python( await async_docker_client.tasks.list( filters={"service": service_without_labels.Spec.Name} ), @@ -170,8 +168,7 @@ async def test_post_task_progress_message( "running", ) assert service_with_labels.Spec - service_tasks = parse_obj_as( - list[Task], + service_tasks = TypeAdapter(list[Task]).validate_python( await async_docker_client.tasks.list( filters={"service": service_with_labels.Spec.Name} ), @@ -195,7 +192,7 @@ async def test_post_task_progress_message( progress_type=ProgressType.CLUSTER_UP_SCALING, report=ProgressReport(actual_value=progress_value, total=1), ) - .json() + .model_dump_json() .encode() ) print("... 
message received") @@ -217,8 +214,7 @@ async def test_post_task_progress_does_not_raise_if_service_has_no_labels( ): service_without_labels = await create_service(task_template, {}, "running") assert service_without_labels.Spec - service_tasks = parse_obj_as( - list[Task], + service_tasks = TypeAdapter(list[Task]).validate_python( await async_docker_client.tasks.list( filters={"service": service_without_labels.Spec.Name} ), From 3f781e620e63ff3befb6556ec8c88963004fda93 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 7 Oct 2024 11:49:54 +0200 Subject: [PATCH 213/280] run bump-pydantic --- .../core/settings.py | 63 +++++++++++-------- 1 file changed, 37 insertions(+), 26 deletions(-) diff --git a/services/clusters-keeper/src/simcore_service_clusters_keeper/core/settings.py b/services/clusters-keeper/src/simcore_service_clusters_keeper/core/settings.py index 3c1e69d85a2..06136b000f9 100644 --- a/services/clusters-keeper/src/simcore_service_clusters_keeper/core/settings.py +++ b/services/clusters-keeper/src/simcore_service_clusters_keeper/core/settings.py @@ -1,6 +1,6 @@ import datetime from functools import cached_property -from typing import Any, ClassVar, Final, Literal, cast +from typing import Final, Literal, cast from aws_library.ec2 import EC2InstanceBootSpecific, EC2Tags from fastapi import FastAPI @@ -12,14 +12,16 @@ ) from models_library.clusters import InternalClusterAuthentication from pydantic import ( + AliasChoices, Field, NonNegativeFloat, NonNegativeInt, PositiveInt, SecretStr, - parse_obj_as, - validator, + TypeAdapter, + field_validator, ) +from pytest_simcore.helpers.dict_tools import ConfigDict from settings_library.base import BaseCustomSettings from settings_library.docker_registry import RegistrySettings from settings_library.ec2 import EC2Settings @@ -34,10 +36,9 @@ class ClustersKeeperEC2Settings(EC2Settings): - class Config(EC2Settings.Config): - env_prefix = CLUSTERS_KEEPER_ENV_PREFIX - - schema_extra: ClassVar[dict[str, Any]] = { # type: ignore[misc] + model_config = ConfigDict( + env_prefix=CLUSTERS_KEEPER_ENV_PREFIX, + json_schema_extra={ "examples": [ { f"{CLUSTERS_KEEPER_ENV_PREFIX}EC2_ACCESS_KEY_ID": "my_access_key_id", @@ -46,7 +47,8 @@ class Config(EC2Settings.Config): f"{CLUSTERS_KEEPER_ENV_PREFIX}EC2_SECRET_ACCESS_KEY": "my_secret_access_key", } ], - } + }, + ) class WorkersEC2InstancesSettings(BaseCustomSettings): @@ -77,7 +79,7 @@ class WorkersEC2InstancesSettings(BaseCustomSettings): # NAME PREFIX is not exposed since we override it anyway WORKERS_EC2_INSTANCES_SECURITY_GROUP_IDS: list[str] = Field( ..., - min_items=1, + min_length=1, description="A security group acts as a virtual firewall for your EC2 instances to control incoming and outgoing traffic" " (https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-security-groups.html), " " this is required to start a new EC2 instance", @@ -108,14 +110,14 @@ class WorkersEC2InstancesSettings(BaseCustomSettings): "a tag must have a key and an optional value. 
see [https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/Using_Tags.html]", ) - @validator("WORKERS_EC2_INSTANCES_ALLOWED_TYPES") + @field_validator("WORKERS_EC2_INSTANCES_ALLOWED_TYPES") @classmethod def check_valid_instance_names( cls, value: dict[str, EC2InstanceBootSpecific] ) -> dict[str, EC2InstanceBootSpecific]: # NOTE: needed because of a flaw in BaseCustomSettings # issubclass raises TypeError if used on Aliases - parse_obj_as(list[InstanceTypeType], list(value)) + TypeAdapter(list[InstanceTypeType]).validate_python(list(value)) return value @@ -130,7 +132,7 @@ class PrimaryEC2InstancesSettings(BaseCustomSettings): ) PRIMARY_EC2_INSTANCES_SECURITY_GROUP_IDS: list[str] = Field( ..., - min_items=1, + min_length=1, description="A security group acts as a virtual firewall for your EC2 instances to control incoming and outgoing traffic" " (https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-security-groups.html), " " this is required to start a new EC2 instance", @@ -182,17 +184,17 @@ class PrimaryEC2InstancesSettings(BaseCustomSettings): "that take longer than this time will be terminated as sometimes it happens that EC2 machine fail on start.", ) - @validator("PRIMARY_EC2_INSTANCES_ALLOWED_TYPES") + @field_validator("PRIMARY_EC2_INSTANCES_ALLOWED_TYPES") @classmethod def check_valid_instance_names( cls, value: dict[str, EC2InstanceBootSpecific] ) -> dict[str, EC2InstanceBootSpecific]: # NOTE: needed because of a flaw in BaseCustomSettings # issubclass raises TypeError if used on Aliases - parse_obj_as(list[InstanceTypeType], list(value)) + TypeAdapter(list[InstanceTypeType]).validate_python(list(value)) return value - @validator("PRIMARY_EC2_INSTANCES_ALLOWED_TYPES") + @field_validator("PRIMARY_EC2_INSTANCES_ALLOWED_TYPES") @classmethod def check_only_one_value( cls, value: dict[str, EC2InstanceBootSpecific] @@ -231,30 +233,35 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): # RUNTIME ----------------------------------------------------------- CLUSTERS_KEEPER_DEBUG: bool = Field( - default=False, description="Debug mode", env=["CLUSTERS_KEEPER_DEBUG", "DEBUG"] + default=False, + description="Debug mode", + validation_alias=AliasChoices("CLUSTERS_KEEPER_DEBUG", "DEBUG"), ) CLUSTERS_KEEPER_LOGLEVEL: LogLevel = Field( - LogLevel.INFO, env=["CLUSTERS_KEEPER_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"] + LogLevel.INFO, + validation_alias=AliasChoices( + "CLUSTERS_KEEPER_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL" + ), ) CLUSTERS_KEEPER_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field( default=False, - env=[ + validation_alias=AliasChoices( "CLUSTERS_KEEPER_LOG_FORMAT_LOCAL_DEV_ENABLED", "LOG_FORMAT_LOCAL_DEV_ENABLED", - ], + ), description="Enables local development log format. 
WARNING: make sure it is disabled if you want to have structured logs!", ) CLUSTERS_KEEPER_EC2_ACCESS: ClustersKeeperEC2Settings | None = Field( - auto_default_from_env=True + json_schema_extra={"auto_default_from_env": True} ) CLUSTERS_KEEPER_PRIMARY_EC2_INSTANCES: PrimaryEC2InstancesSettings | None = Field( - auto_default_from_env=True + json_schema_extra={"auto_default_from_env": True} ) CLUSTERS_KEEPER_WORKERS_EC2_INSTANCES: WorkersEC2InstancesSettings | None = Field( - auto_default_from_env=True + json_schema_extra={"auto_default_from_env": True} ) CLUSTERS_KEEPER_EC2_INSTANCES_PREFIX: str = Field( @@ -262,14 +269,18 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): description="set a prefix to all machines created (useful for testing)", ) - CLUSTERS_KEEPER_RABBITMQ: RabbitSettings | None = Field(auto_default_from_env=True) + CLUSTERS_KEEPER_RABBITMQ: RabbitSettings | None = Field( + json_schema_extra={"auto_default_from_env": True} + ) CLUSTERS_KEEPER_PROMETHEUS_INSTRUMENTATION_ENABLED: bool = True - CLUSTERS_KEEPER_REDIS: RedisSettings = Field(auto_default_from_env=True) + CLUSTERS_KEEPER_REDIS: RedisSettings = Field( + json_schema_extra={"auto_default_from_env": True} + ) CLUSTERS_KEEPER_REGISTRY: RegistrySettings | None = Field( - auto_default_from_env=True + json_schema_extra={"auto_default_from_env": True} ) CLUSTERS_KEEPER_TASK_INTERVAL: datetime.timedelta = Field( @@ -320,7 +331,7 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): def LOG_LEVEL(self) -> LogLevel: # noqa: N802 return self.CLUSTERS_KEEPER_LOGLEVEL - @validator("CLUSTERS_KEEPER_LOGLEVEL") + @field_validator("CLUSTERS_KEEPER_LOGLEVEL") @classmethod def valid_log_level(cls, value: str) -> str: return cls.validate_log_level(value) From 0ce5ef45851c99375b8472541e9e754dedaba1f6 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 7 Oct 2024 12:21:24 +0200 Subject: [PATCH 214/280] fix pylint --- packages/aws-library/src/aws_library/ec2/_errors.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/packages/aws-library/src/aws_library/ec2/_errors.py b/packages/aws-library/src/aws_library/ec2/_errors.py index c39047db00d..1c2a5c9af1f 100644 --- a/packages/aws-library/src/aws_library/ec2/_errors.py +++ b/packages/aws-library/src/aws_library/ec2/_errors.py @@ -1,12 +1,9 @@ # pylint: disable=too-many-ancestors -from typing import Any - from models_library.errors_classes import OsparcErrorMixin class EC2BaseError(OsparcErrorMixin, Exception): - def __init__(self, **ctx: Any) -> None: - super().__init__(**ctx) + pass class EC2RuntimeError(EC2BaseError, RuntimeError): From 3f5d881fc7384df76418c956a14fb0d441051c84 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 7 Oct 2024 13:21:50 +0200 Subject: [PATCH 215/280] add base common library --- .github/workflows/ci-testing-deploy.yml | 45 +++++++++++ ci/github/unit-testing/common-library.bash | 43 ++++++++++ packages/common-library/README.md | 11 ++- packages/common-library/VERSION | 2 +- packages/common-library/requirements/Makefile | 6 ++ packages/common-library/requirements/_base.in | 6 ++ .../common-library/requirements/_base.txt | 12 +++ packages/common-library/requirements/_test.in | 21 +++++ .../common-library/requirements/_test.txt | 55 +++++++++++++ .../common-library/requirements/_tools.in | 5 ++ .../common-library/requirements/_tools.txt | 79 +++++++++++++++++++ packages/common-library/requirements/ci.txt | 17 ++++ packages/common-library/requirements/dev.txt | 18 +++++ 
packages/common-library/setup.cfg | 2 +- packages/common-library/setup.py | 4 +- .../src/common_library/__init__.py | 12 +++ .../common-library/src/common_library/base.py | 2 + packages/common-library/tests/conftest.py | 33 ++++++++ packages/common-library/tests/test_base.py | 4 + 19 files changed, 371 insertions(+), 6 deletions(-) create mode 100755 ci/github/unit-testing/common-library.bash create mode 100644 packages/common-library/requirements/Makefile create mode 100644 packages/common-library/requirements/_base.in create mode 100644 packages/common-library/requirements/_base.txt create mode 100644 packages/common-library/requirements/_test.in create mode 100644 packages/common-library/requirements/_test.txt create mode 100644 packages/common-library/requirements/_tools.in create mode 100644 packages/common-library/requirements/_tools.txt create mode 100644 packages/common-library/requirements/ci.txt create mode 100644 packages/common-library/requirements/dev.txt create mode 100644 packages/common-library/src/common_library/__init__.py create mode 100644 packages/common-library/src/common_library/base.py create mode 100644 packages/common-library/tests/conftest.py create mode 100644 packages/common-library/tests/test_base.py diff --git a/.github/workflows/ci-testing-deploy.yml b/.github/workflows/ci-testing-deploy.yml index 1ec9dd55293..adbea2e8e41 100644 --- a/.github/workflows/ci-testing-deploy.yml +++ b/.github/workflows/ci-testing-deploy.yml @@ -54,6 +54,7 @@ jobs: aws-library: ${{ steps.filter.outputs.aws-library }} dask-task-models-library: ${{ steps.filter.outputs.dask-task-models-library }} models-library: ${{ steps.filter.outputs.models-library }} + common-library: ${{ steps.filter.outputs.common-library }} notifications-library: ${{ steps.filter.outputs.notifications-library }} postgres-database: ${{ steps.filter.outputs.postgres-database }} service-integration: ${{ steps.filter.outputs.service-integration }} @@ -110,6 +111,8 @@ jobs: - 'services/docker-compose*' - 'scripts/mypy/*' - 'mypy.ini' + common-library: + - 'packages/common-library/**' notifications-library: - 'packages/notifications-library/**' - 'packages/postgres-database/**' @@ -1593,6 +1596,47 @@ jobs: with: flags: unittests #optional + unit-test-common-library: + needs: changes + if: ${{ needs.changes.outputs.common-library == 'true' || github.event_name == 'push' }} + timeout-minutes: 18 # if this timeout gets too small, then split the tests + name: "[unit] common-library" + runs-on: ${{ matrix.os }} + strategy: + matrix: + python: ["3.11"] + os: [ubuntu-22.04] + fail-fast: false + steps: + - uses: actions/checkout@v4 + - name: setup docker buildx + id: buildx + uses: docker/setup-buildx-action@v3 + with: + driver: docker-container + - name: setup python environment + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python }} + - name: install uv + uses: yezz123/setup-uv@v4 + - uses: actions/cache@v4 + id: cache-uv + with: + path: ~/.cache/uv + key: ${{ runner.os }}-${{ github.job }}-python-${{ matrix.python }}-uv + - name: show system version + run: ./ci/helpers/show_system_versions.bash + - name: install + run: ./ci/github/unit-testing/common-library.bash install + - name: typecheck + run: ./ci/github/unit-testing/common-library.bash typecheck + - name: test + run: ./ci/github/unit-testing/common-library.bash test + - uses: codecov/codecov-action@v4.5.0 + with: + flags: unittests #optional + unit-test-notifications-library: needs: changes if: ${{ needs.changes.outputs.notifications-library 
== 'true' || github.event_name == 'push' }} @@ -1704,6 +1748,7 @@ jobs: unit-test-efs-guardian, unit-test-frontend, unit-test-models-library, + unit-test-common-library, unit-test-notifications-library, unit-test-osparc-gateway-server, unit-test-payments, diff --git a/ci/github/unit-testing/common-library.bash b/ci/github/unit-testing/common-library.bash new file mode 100755 index 00000000000..71547174103 --- /dev/null +++ b/ci/github/unit-testing/common-library.bash @@ -0,0 +1,43 @@ +#!/bin/bash +# http://redsymbol.net/articles/unofficial-bash-strict-mode/ +set -o errexit # abort on nonzero exitstatus +set -o nounset # abort on unbound variable +set -o pipefail # don't hide errors within pipes +IFS=$'\n\t' + +install() { + make devenv + # shellcheck source=/dev/null + source .venv/bin/activate + pushd packages/common-library + make install-ci + popd + uv pip list +} + +test() { + # shellcheck source=/dev/null + source .venv/bin/activate + pushd packages/common-library + make tests-ci + popd +} + +typecheck() { + # shellcheck source=/dev/null + source .venv/bin/activate + uv pip install mypy + pushd packages/common-library + make mypy + popd +} + +# Check if the function exists (bash specific) +if declare -f "$1" >/dev/null; then + # call arguments verbatim + "$@" +else + # Show a helpful error + echo "'$1' is not a known function name" >&2 + exit 1 +fi diff --git a/packages/common-library/README.md b/packages/common-library/README.md index ee3957a4031..8e30b4405ca 100644 --- a/packages/common-library/README.md +++ b/packages/common-library/README.md @@ -1,4 +1,11 @@ -# simcore pydantic common library +# simcore pydantic models library + +Contains the [pydantic](https://pydantic-docs.helpmanual.io/)-based models for use in the simcore platform. As a reminder pydantic allows creation of python classes that automatically validate their contents based on types. It also provides mechanism to generate json schemas describing the classes internals. + +Requirements to be compatible with the library: + +- be a pydantic-based model +- not a model for use in a REST API (or at least not directly) only for a specific service (ServiceUpdate model for use in a PATCH REST call on the webserver has nothing to do in the library for example, but a base class for it is ok) ## Installation @@ -24,7 +31,7 @@ How run diagnostics on the service metadata published in a docker registry? make devenv source .venv/bin/activate -cd packages/common-library +cd packages/models-library make install-dev ``` 2. Set ``REGISTRY_*`` env vars in ``.env`` (in the repository base folder) diff --git a/packages/common-library/VERSION b/packages/common-library/VERSION index 6e8bf73aa55..0ea3a944b39 100644 --- a/packages/common-library/VERSION +++ b/packages/common-library/VERSION @@ -1 +1 @@ -0.1.0 +0.2.0 diff --git a/packages/common-library/requirements/Makefile b/packages/common-library/requirements/Makefile new file mode 100644 index 00000000000..3f25442b790 --- /dev/null +++ b/packages/common-library/requirements/Makefile @@ -0,0 +1,6 @@ +# +# Targets to pip-compile requirements +# +include ../../../requirements/base.Makefile + +# Add here any extra explicit dependency: e.g. 
_migration.txt: _base.txt diff --git a/packages/common-library/requirements/_base.in b/packages/common-library/requirements/_base.in new file mode 100644 index 00000000000..e8ecb6f4060 --- /dev/null +++ b/packages/common-library/requirements/_base.in @@ -0,0 +1,6 @@ +# +# Specifies third-party dependencies for 'models-library' +# +--constraint ../../../requirements/constraints.txt + +pydantic diff --git a/packages/common-library/requirements/_base.txt b/packages/common-library/requirements/_base.txt new file mode 100644 index 00000000000..32a8575abea --- /dev/null +++ b/packages/common-library/requirements/_base.txt @@ -0,0 +1,12 @@ +annotated-types==0.7.0 + # via pydantic +pydantic==2.9.2 + # via + # -c requirements/../../../requirements/constraints.txt + # -r requirements/_base.in +pydantic-core==2.23.4 + # via pydantic +typing-extensions==4.12.2 + # via + # pydantic + # pydantic-core diff --git a/packages/common-library/requirements/_test.in b/packages/common-library/requirements/_test.in new file mode 100644 index 00000000000..161f20fda46 --- /dev/null +++ b/packages/common-library/requirements/_test.in @@ -0,0 +1,21 @@ +# +# Specifies dependencies required to run 'models-library' +# +--constraint ../../../requirements/constraints.txt + +# Adds base AS CONSTRAINT specs, not requirement. +# - Resulting _text.txt is a frozen list of EXTRA packages for testing, besides _base.txt +# +--constraint _base.txt + +coverage +faker +pytest +pytest-asyncio +pytest-cov +pytest-icdiff +pytest-instafail +pytest-mock +pytest-runner +pytest-sugar +python-dotenv diff --git a/packages/common-library/requirements/_test.txt b/packages/common-library/requirements/_test.txt new file mode 100644 index 00000000000..2354abd790d --- /dev/null +++ b/packages/common-library/requirements/_test.txt @@ -0,0 +1,55 @@ +coverage==7.6.1 + # via + # -r requirements/_test.in + # pytest-cov +faker==30.1.0 + # via -r requirements/_test.in +icdiff==2.0.7 + # via pytest-icdiff +iniconfig==2.0.0 + # via pytest +packaging==24.1 + # via + # pytest + # pytest-sugar +pluggy==1.5.0 + # via pytest +pprintpp==0.4.0 + # via pytest-icdiff +pytest==8.3.3 + # via + # -r requirements/_test.in + # pytest-asyncio + # pytest-cov + # pytest-icdiff + # pytest-instafail + # pytest-mock + # pytest-sugar +pytest-asyncio==0.23.8 + # via + # -c requirements/../../../requirements/constraints.txt + # -r requirements/_test.in +pytest-cov==5.0.0 + # via -r requirements/_test.in +pytest-icdiff==0.9 + # via -r requirements/_test.in +pytest-instafail==0.5.0 + # via -r requirements/_test.in +pytest-mock==3.14.0 + # via -r requirements/_test.in +pytest-runner==6.0.1 + # via -r requirements/_test.in +pytest-sugar==1.0.0 + # via -r requirements/_test.in +python-dateutil==2.9.0.post0 + # via faker +python-dotenv==1.0.1 + # via -r requirements/_test.in +six==1.16.0 + # via python-dateutil +termcolor==2.5.0 + # via pytest-sugar +typing-extensions==4.12.2 + # via + # -c requirements/_base.txt + # faker diff --git a/packages/common-library/requirements/_tools.in b/packages/common-library/requirements/_tools.in new file mode 100644 index 00000000000..1def82c12a3 --- /dev/null +++ b/packages/common-library/requirements/_tools.in @@ -0,0 +1,5 @@ +--constraint ../../../requirements/constraints.txt +--constraint _base.txt +--constraint _test.txt + +--requirement ../../../requirements/devenv.txt diff --git a/packages/common-library/requirements/_tools.txt b/packages/common-library/requirements/_tools.txt new file mode 100644 index 00000000000..a333bb822ae --- /dev/null 
+++ b/packages/common-library/requirements/_tools.txt @@ -0,0 +1,79 @@ +astroid==3.3.5 + # via pylint +black==24.8.0 + # via -r requirements/../../../requirements/devenv.txt +build==1.2.2.post1 + # via pip-tools +bump2version==1.0.1 + # via -r requirements/../../../requirements/devenv.txt +cfgv==3.4.0 + # via pre-commit +click==8.1.7 + # via + # black + # pip-tools +dill==0.3.9 + # via pylint +distlib==0.3.8 + # via virtualenv +filelock==3.16.1 + # via virtualenv +identify==2.6.1 + # via pre-commit +isort==5.13.2 + # via + # -r requirements/../../../requirements/devenv.txt + # pylint +mccabe==0.7.0 + # via pylint +mypy==1.11.2 + # via -r requirements/../../../requirements/devenv.txt +mypy-extensions==1.0.0 + # via + # black + # mypy +nodeenv==1.9.1 + # via pre-commit +packaging==24.1 + # via + # -c requirements/_test.txt + # black + # build +pathspec==0.12.1 + # via black +pip==24.2 + # via pip-tools +pip-tools==7.4.1 + # via -r requirements/../../../requirements/devenv.txt +platformdirs==4.3.6 + # via + # black + # pylint + # virtualenv +pre-commit==4.0.0 + # via -r requirements/../../../requirements/devenv.txt +pylint==3.3.1 + # via -r requirements/../../../requirements/devenv.txt +pyproject-hooks==1.2.0 + # via + # build + # pip-tools +pyyaml==6.0.2 + # via + # -c requirements/../../../requirements/constraints.txt + # pre-commit +ruff==0.6.9 + # via -r requirements/../../../requirements/devenv.txt +setuptools==75.1.0 + # via pip-tools +tomlkit==0.13.2 + # via pylint +typing-extensions==4.12.2 + # via + # -c requirements/_base.txt + # -c requirements/_test.txt + # mypy +virtualenv==20.26.6 + # via pre-commit +wheel==0.44.0 + # via pip-tools diff --git a/packages/common-library/requirements/ci.txt b/packages/common-library/requirements/ci.txt new file mode 100644 index 00000000000..ff171610d5d --- /dev/null +++ b/packages/common-library/requirements/ci.txt @@ -0,0 +1,17 @@ +# Shortcut to install all packages for the contigous integration (CI) of 'models-library' +# +# - As ci.txt but w/ tests +# +# Usage: +# pip install -r requirements/ci.txt +# + +# installs base + tests requirements +--requirement _base.txt +--requirement _test.txt + +# installs this repo's packages +pytest-simcore @ ../pytest-simcore + +# current module +simcore-common-library @ . diff --git a/packages/common-library/requirements/dev.txt b/packages/common-library/requirements/dev.txt new file mode 100644 index 00000000000..32d383e9ccc --- /dev/null +++ b/packages/common-library/requirements/dev.txt @@ -0,0 +1,18 @@ +# Shortcut to install all packages needed to develop 'models-library' +# +# - As ci.txt but with current and repo packages in develop (edit) mode +# +# Usage: +# pip install -r requirements/dev.txt +# + +# installs base + tests requirements +--requirement _base.txt +--requirement _test.txt +--requirement _tools.txt + +# installs this repo's packages +--editable ../pytest-simcore/ + +# current module +--editable . 
diff --git a/packages/common-library/setup.cfg b/packages/common-library/setup.cfg index 07196483569..b33be52008a 100644 --- a/packages/common-library/setup.cfg +++ b/packages/common-library/setup.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.1.0 +current_version = 0.2.0 commit = True message = packages/common-library version: {current_version} → {new_version} tag = False diff --git a/packages/common-library/setup.py b/packages/common-library/setup.py index 24dded82a04..ceef017dfb1 100644 --- a/packages/common-library/setup.py +++ b/packages/common-library/setup.py @@ -32,7 +32,7 @@ def read_reqs(reqs_path: Path) -> set[str]: SETUP = { "name": "simcore-common-library", "version": Path(CURRENT_DIR / "VERSION").read_text().strip(), - "author": "Sylvain Anderegg (sanderegg)", + "author": "Giancarlo Romeo (giancarloromeo)", "description": "Core service library for simcore pydantic common", "python_requires": "~=3.10", "classifiers": [ @@ -40,7 +40,7 @@ def read_reqs(reqs_path: Path) -> set[str]: "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Natural Language :: English", - "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", ], "long_description": Path(CURRENT_DIR / "README.md").read_text(), "license": "MIT license", diff --git a/packages/common-library/src/common_library/__init__.py b/packages/common-library/src/common_library/__init__.py new file mode 100644 index 00000000000..bc773957a1a --- /dev/null +++ b/packages/common-library/src/common_library/__init__.py @@ -0,0 +1,12 @@ +""" osparc's service models library + +""" + +# +# NOTE: +# - "examples" = [ ...] keyword and NOT "example". See https://json-schema.org/understanding-json-schema/reference/generic.html#annotations +# + +from importlib.metadata import version + +__version__: str = version("simcore-common-library") diff --git a/packages/common-library/src/common_library/base.py b/packages/common-library/src/common_library/base.py new file mode 100644 index 00000000000..f1001ed1f5a --- /dev/null +++ b/packages/common-library/src/common_library/base.py @@ -0,0 +1,2 @@ +class Initial: + pass diff --git a/packages/common-library/tests/conftest.py b/packages/common-library/tests/conftest.py new file mode 100644 index 00000000000..46f09f86b46 --- /dev/null +++ b/packages/common-library/tests/conftest.py @@ -0,0 +1,33 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-import + +import sys +from pathlib import Path + +import common_library +import pytest + +pytest_plugins = [ + "pytest_simcore.pydantic_models", + "pytest_simcore.pytest_global_environs", + "pytest_simcore.repository_paths", + "pytest_simcore.schemas", +] + +CURRENT_DIR = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent + + +@pytest.fixture(scope="session") +def package_dir(): + pdir = Path(common_library.__file__).resolve().parent + assert pdir.exists() + return pdir + + +@pytest.fixture(scope="session") +def project_slug_dir() -> Path: + folder = CURRENT_DIR.parent + assert folder.exists() + assert any(folder.glob("src/common_library")) + return folder diff --git a/packages/common-library/tests/test_base.py b/packages/common-library/tests/test_base.py new file mode 100644 index 00000000000..24f5b8875a3 --- /dev/null +++ b/packages/common-library/tests/test_base.py @@ -0,0 +1,4 @@ +from common_library.base import Initial + +def test_model(): + assert Initial() From df864b4054144402f20e9da2e31edf1684768437 Mon Sep 17 00:00:00 
2001 From: Giancarlo Romeo Date: Mon, 7 Oct 2024 10:41:55 +0200 Subject: [PATCH 216/280] add error classes --- .../common-library/src/common_library/base.py | 2 - .../src/common_library/errors_classes.py | 42 +++++ packages/common-library/tests/test_base.py | 4 - .../tests/test_errors_classes.py | 143 ++++++++++++++++++ 4 files changed, 185 insertions(+), 6 deletions(-) delete mode 100644 packages/common-library/src/common_library/base.py create mode 100644 packages/common-library/src/common_library/errors_classes.py delete mode 100644 packages/common-library/tests/test_base.py create mode 100644 packages/common-library/tests/test_errors_classes.py diff --git a/packages/common-library/src/common_library/base.py b/packages/common-library/src/common_library/base.py deleted file mode 100644 index f1001ed1f5a..00000000000 --- a/packages/common-library/src/common_library/base.py +++ /dev/null @@ -1,2 +0,0 @@ -class Initial: - pass diff --git a/packages/common-library/src/common_library/errors_classes.py b/packages/common-library/src/common_library/errors_classes.py new file mode 100644 index 00000000000..1438536d550 --- /dev/null +++ b/packages/common-library/src/common_library/errors_classes.py @@ -0,0 +1,42 @@ +from pydantic.errors import PydanticErrorMixin + + +class _DefaultDict(dict): + def __missing__(self, key): + return f"'{key}=?'" + + +class OsparcErrorMixin(PydanticErrorMixin): + code: str # type: ignore[assignment] + msg_template: str + + def __new__(cls, *_args, **_kwargs): + if not hasattr(cls, "code"): + cls.code = cls._get_full_class_name() + return super().__new__(cls) + + def __init__(self, *_args, **kwargs) -> None: + self.__dict__ = kwargs + super().__init__(message=self._build_message(), code=self.code) # type: ignore[arg-type] + + def __str__(self) -> str: + return self._build_message() + + def _build_message(self) -> str: + # NOTE: safe. Does not raise KeyError + return self.msg_template.format_map(_DefaultDict(**self.__dict__)) + + @classmethod + def _get_full_class_name(cls) -> str: + relevant_classes = [ + c.__name__ + for c in cls.__mro__[:-1] + if c.__name__ + not in ( + "PydanticErrorMixin", + "OsparcErrorMixin", + "Exception", + "BaseException", + ) + ] + return ".".join(reversed(relevant_classes)) diff --git a/packages/common-library/tests/test_base.py b/packages/common-library/tests/test_base.py deleted file mode 100644 index 24f5b8875a3..00000000000 --- a/packages/common-library/tests/test_base.py +++ /dev/null @@ -1,4 +0,0 @@ -from common_library.base import Initial - -def test_model(): - assert Initial() diff --git a/packages/common-library/tests/test_errors_classes.py b/packages/common-library/tests/test_errors_classes.py new file mode 100644 index 00000000000..ae0ed8c1e3d --- /dev/null +++ b/packages/common-library/tests/test_errors_classes.py @@ -0,0 +1,143 @@ +# pylint: disable=protected-access +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=no-member + + +from datetime import datetime +from typing import Any + +import pytest +from common_library.errors_classes import OsparcErrorMixin + + +def test_get_full_class_name(): + class A(OsparcErrorMixin): + ... + + class B1(A): + ... + + class B2(A): + ... + + class C(B2): + ... + + class B12(B1, ValueError): + ... 
+ + assert B1._get_full_class_name() == "A.B1" + assert C._get_full_class_name() == "A.B2.C" + assert A._get_full_class_name() == "A" + + # diamond inheritance (not usual but supported) + assert B12._get_full_class_name() == "ValueError.A.B1.B12" + + +def test_error_codes_and_msg_template(): + class MyBaseError(OsparcErrorMixin, Exception): + def __init__(self, **ctx: Any) -> None: + super().__init__(**ctx) # Do not forget this for base exceptions! + + class MyValueError(MyBaseError, ValueError): + msg_template = "Wrong value {value}" + + error = MyValueError(value=42) + + assert error.code == "ValueError.MyBaseError.MyValueError" + assert f"{error}" == "Wrong value 42" + + class MyTypeError(MyBaseError, TypeError): + code = "i_want_this" + msg_template = "Wrong type {type}" + + error = MyTypeError(type="int") + + assert error.code == "i_want_this" + assert f"{error}" == "Wrong type int" + + +def test_error_msg_template_override(): + class MyError(OsparcErrorMixin, Exception): + msg_template = "Wrong value {value}" + + error_override_msg = MyError(msg_template="I want this message") + assert str(error_override_msg) == "I want this message" + + error = MyError(value=42) + assert hasattr(error, "value") + assert str(error) == f"Wrong value {error.value}" + + +def test_error_msg_template_nicer_override(): + class MyError(OsparcErrorMixin, Exception): + msg_template = "Wrong value {value}" + + def __init__(self, msg=None, **ctx: Any) -> None: + super().__init__(**ctx) + # positional argument msg (if defined) overrides the msg_template + if msg: + self.msg_template = msg + + error_override_msg = MyError("I want this message") + assert str(error_override_msg) == "I want this message" + + error = MyError(value=42) + assert hasattr(error, "value") + assert str(error) == f"Wrong value {error.value}" + + +def test_error_with_constructor(): + class MyError(OsparcErrorMixin, ValueError): + msg_template = "Wrong value {value}" + + # handy e.g. 
autocompletion + def __init__(self, *, my_value: int = 42, **extra): + super().__init__(**extra) + self.value = my_value + + error = MyError(my_value=33, something_else="yes") + assert error.value == 33 + assert str(error) == "Wrong value 33" + assert not hasattr(error, "my_value") + + # the autocompletion does not see this + assert error.something_else == "yes" + + +@pytest.mark.parametrize( + "str_format,ctx,expected", + [ + pytest.param("{value:10}", {"value": "Python"}, "Python ", id="left-align"), + pytest.param( + "{value:>10}", {"value": "Python"}, " Python", id="right-align" + ), + pytest.param( + "{value:^10}", {"value": "Python"}, " Python ", id="center-align" + ), + pytest.param("{v:.2f}", {"v": 3.1415926}, "3.14", id="decimals"), + pytest.param( + "{dt:%Y-%m-%d %H:%M}", + {"dt": datetime(2020, 5, 17, 18, 45)}, + "2020-05-17 18:45", + id="datetime", + ), + ], +) +def test_msg_template_with_different_formats( + str_format: str, ctx: dict[str, Any], expected: str +): + class MyError(OsparcErrorMixin, ValueError): + msg_template = str_format + + error = MyError(**ctx) + assert str(error) == expected + + +def test_missing_keys_in_msg_template_does_not_raise(): + class MyError(OsparcErrorMixin, ValueError): + msg_template = "{value} and {missing}" + + assert str(MyError(value=42)) == "42 and 'missing=?'" From 09c00735c62332000b96c80733306c5084570165 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 7 Oct 2024 13:39:35 +0200 Subject: [PATCH 217/280] point to common library --- .../src/models_library/osparc_variable_identifier.py | 3 ++- .../src/models_library/utils/specs_substitution.py | 3 ++- packages/models-library/tests/test_errors_classes.py | 2 +- .../notifications-library/src/notifications_library/errors.py | 2 +- .../src/servicelib/rabbitmq/rpc_interfaces/catalog/errors.py | 2 +- 5 files changed, 7 insertions(+), 5 deletions(-) diff --git a/packages/models-library/src/models_library/osparc_variable_identifier.py b/packages/models-library/src/models_library/osparc_variable_identifier.py index a4b0d4e882b..80a8e6d0fc0 100644 --- a/packages/models-library/src/models_library/osparc_variable_identifier.py +++ b/packages/models-library/src/models_library/osparc_variable_identifier.py @@ -1,8 +1,9 @@ from copy import deepcopy from typing import Any, TypeVar +from common_library.errors_classes import OsparcErrorMixin from models_library.basic_types import ConstrainedStr -from models_library.errors_classes import OsparcErrorMixin + from pydantic import BaseModel from .utils.string_substitution import OSPARC_IDENTIFIER_PREFIX diff --git a/packages/models-library/src/models_library/utils/specs_substitution.py b/packages/models-library/src/models_library/utils/specs_substitution.py index 73260237b22..d8a7e9cf161 100644 --- a/packages/models-library/src/models_library/utils/specs_substitution.py +++ b/packages/models-library/src/models_library/utils/specs_substitution.py @@ -1,6 +1,7 @@ from typing import Any, NamedTuple, TypeAlias, cast -from models_library.errors_classes import OsparcErrorMixin +from common_library.errors_classes import OsparcErrorMixin + from pydantic import StrictBool, StrictFloat, StrictInt from .json_serialization import json_dumps, json_loads diff --git a/packages/models-library/tests/test_errors_classes.py b/packages/models-library/tests/test_errors_classes.py index 754367805e2..ae0ed8c1e3d 100644 --- a/packages/models-library/tests/test_errors_classes.py +++ b/packages/models-library/tests/test_errors_classes.py @@ -9,7 +9,7 @@ from typing import Any 
import pytest -from models_library.errors_classes import OsparcErrorMixin +from common_library.errors_classes import OsparcErrorMixin def test_get_full_class_name(): diff --git a/packages/notifications-library/src/notifications_library/errors.py b/packages/notifications-library/src/notifications_library/errors.py index 2ffaa461a02..21edbbb0dc7 100644 --- a/packages/notifications-library/src/notifications_library/errors.py +++ b/packages/notifications-library/src/notifications_library/errors.py @@ -1,6 +1,6 @@ from typing import Any -from models_library.errors_classes import OsparcErrorMixin +from common_library.errors_classes import OsparcErrorMixin class NotifierError(OsparcErrorMixin, Exception): diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/catalog/errors.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/catalog/errors.py index 65c403853ea..5837beddcae 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/catalog/errors.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/catalog/errors.py @@ -1,6 +1,6 @@ from typing import Any -from models_library.errors_classes import OsparcErrorMixin +from common_library.errors_classes import OsparcErrorMixin class CatalogApiBaseError(OsparcErrorMixin, Exception): From d752d18e03ea3b528e1dec6fafb7491c551c8078 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 7 Oct 2024 13:44:20 +0200 Subject: [PATCH 218/280] fix names --- packages/common-library/README.md | 11 +++-------- packages/common-library/requirements/_base.in | 2 +- packages/common-library/requirements/_test.in | 2 +- packages/common-library/requirements/ci.txt | 2 +- packages/common-library/requirements/dev.txt | 2 +- .../common-library/src/common_library/__init__.py | 2 +- packages/common-library/tests/conftest.py | 2 +- 7 files changed, 9 insertions(+), 14 deletions(-) diff --git a/packages/common-library/README.md b/packages/common-library/README.md index 8e30b4405ca..8e5c489787b 100644 --- a/packages/common-library/README.md +++ b/packages/common-library/README.md @@ -1,11 +1,6 @@ -# simcore pydantic models library +# simcore pydantic common library -Contains the [pydantic](https://pydantic-docs.helpmanual.io/)-based models for use in the simcore platform. As a reminder pydantic allows creation of python classes that automatically validate their contents based on types. It also provides mechanism to generate json schemas describing the classes internals. - -Requirements to be compatible with the library: - -- be a pydantic-based model -- not a model for use in a REST API (or at least not directly) only for a specific service (ServiceUpdate model for use in a PATCH REST call on the webserver has nothing to do in the library for example, but a base class for it is ok) +Contains the common classes, functions and in general utilities for use in the simcore platform. ## Installation @@ -31,7 +26,7 @@ How run diagnostics on the service metadata published in a docker registry? make devenv source .venv/bin/activate -cd packages/models-library +cd packages/common-library make install-dev ``` 2. 
Set ``REGISTRY_*`` env vars in ``.env`` (in the repository base folder) diff --git a/packages/common-library/requirements/_base.in b/packages/common-library/requirements/_base.in index e8ecb6f4060..73cc02bceb7 100644 --- a/packages/common-library/requirements/_base.in +++ b/packages/common-library/requirements/_base.in @@ -1,5 +1,5 @@ # -# Specifies third-party dependencies for 'models-library' +# Specifies third-party dependencies for 'common-library' # --constraint ../../../requirements/constraints.txt diff --git a/packages/common-library/requirements/_test.in b/packages/common-library/requirements/_test.in index 161f20fda46..4454d79d36a 100644 --- a/packages/common-library/requirements/_test.in +++ b/packages/common-library/requirements/_test.in @@ -1,5 +1,5 @@ # -# Specifies dependencies required to run 'models-library' +# Specifies dependencies required to run 'common-library' # --constraint ../../../requirements/constraints.txt diff --git a/packages/common-library/requirements/ci.txt b/packages/common-library/requirements/ci.txt index ff171610d5d..ed9eb3028e8 100644 --- a/packages/common-library/requirements/ci.txt +++ b/packages/common-library/requirements/ci.txt @@ -1,4 +1,4 @@ -# Shortcut to install all packages for the contigous integration (CI) of 'models-library' +# Shortcut to install all packages for the contigous integration (CI) of 'common-library' # # - As ci.txt but w/ tests # diff --git a/packages/common-library/requirements/dev.txt b/packages/common-library/requirements/dev.txt index 32d383e9ccc..02718f95c3a 100644 --- a/packages/common-library/requirements/dev.txt +++ b/packages/common-library/requirements/dev.txt @@ -1,4 +1,4 @@ -# Shortcut to install all packages needed to develop 'models-library' +# Shortcut to install all packages needed to develop 'common-library' # # - As ci.txt but with current and repo packages in develop (edit) mode # diff --git a/packages/common-library/src/common_library/__init__.py b/packages/common-library/src/common_library/__init__.py index bc773957a1a..dc0c65ff721 100644 --- a/packages/common-library/src/common_library/__init__.py +++ b/packages/common-library/src/common_library/__init__.py @@ -1,4 +1,4 @@ -""" osparc's service models library +""" osparc's service common library """ diff --git a/packages/common-library/tests/conftest.py b/packages/common-library/tests/conftest.py index 46f09f86b46..124e3c2b022 100644 --- a/packages/common-library/tests/conftest.py +++ b/packages/common-library/tests/conftest.py @@ -9,7 +9,7 @@ import pytest pytest_plugins = [ - "pytest_simcore.pydantic_models", + "pytest_simcore.pydantic_common", "pytest_simcore.pytest_global_environs", "pytest_simcore.repository_paths", "pytest_simcore.schemas", From 3f0ec865e2a3fbb0ceda26fbb19f2f84a759a5a2 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 7 Oct 2024 13:45:38 +0200 Subject: [PATCH 219/280] remove moved modules --- .../src/models_library/errors_classes.py | 41 ----- .../tests/test_errors_classes.py | 143 ------------------ 2 files changed, 184 deletions(-) delete mode 100644 packages/models-library/src/models_library/errors_classes.py delete mode 100644 packages/models-library/tests/test_errors_classes.py diff --git a/packages/models-library/src/models_library/errors_classes.py b/packages/models-library/src/models_library/errors_classes.py deleted file mode 100644 index 6f6d9e0c266..00000000000 --- a/packages/models-library/src/models_library/errors_classes.py +++ /dev/null @@ -1,41 +0,0 @@ -from pydantic.errors import PydanticErrorMixin - - 
-class _DefaultDict(dict): - def __missing__(self, key): - return f"'{key}=?'" - - -class OsparcErrorMixin(PydanticErrorMixin): - msg_template: str - - def __new__(cls, *_args, **_kwargs): - if not hasattr(cls, "code"): - cls.code = cls._get_full_class_name() # type: ignore[assignment] - return super().__new__(cls) - - def __init__(self, *_args, **kwargs) -> None: - self.__dict__ = kwargs - super().__init__(message=self._build_message(), code=self.code) - - def __str__(self) -> str: - return self._build_message() - - def _build_message(self) -> str: - # NOTE: safe. Does not raise KeyError - return self.msg_template.format_map(_DefaultDict(**self.__dict__)) - - @classmethod - def _get_full_class_name(cls) -> str: - relevant_classes = [ - c.__name__ - for c in cls.__mro__[:-1] - if c.__name__ - not in ( - "PydanticErrorMixin", - "OsparcErrorMixin", - "Exception", - "BaseException", - ) - ] - return ".".join(reversed(relevant_classes)) diff --git a/packages/models-library/tests/test_errors_classes.py b/packages/models-library/tests/test_errors_classes.py deleted file mode 100644 index ae0ed8c1e3d..00000000000 --- a/packages/models-library/tests/test_errors_classes.py +++ /dev/null @@ -1,143 +0,0 @@ -# pylint: disable=protected-access -# pylint: disable=redefined-outer-name -# pylint: disable=unused-argument -# pylint: disable=unused-variable -# pylint: disable=no-member - - -from datetime import datetime -from typing import Any - -import pytest -from common_library.errors_classes import OsparcErrorMixin - - -def test_get_full_class_name(): - class A(OsparcErrorMixin): - ... - - class B1(A): - ... - - class B2(A): - ... - - class C(B2): - ... - - class B12(B1, ValueError): - ... - - assert B1._get_full_class_name() == "A.B1" - assert C._get_full_class_name() == "A.B2.C" - assert A._get_full_class_name() == "A" - - # diamond inheritance (not usual but supported) - assert B12._get_full_class_name() == "ValueError.A.B1.B12" - - -def test_error_codes_and_msg_template(): - class MyBaseError(OsparcErrorMixin, Exception): - def __init__(self, **ctx: Any) -> None: - super().__init__(**ctx) # Do not forget this for base exceptions! 
- - class MyValueError(MyBaseError, ValueError): - msg_template = "Wrong value {value}" - - error = MyValueError(value=42) - - assert error.code == "ValueError.MyBaseError.MyValueError" - assert f"{error}" == "Wrong value 42" - - class MyTypeError(MyBaseError, TypeError): - code = "i_want_this" - msg_template = "Wrong type {type}" - - error = MyTypeError(type="int") - - assert error.code == "i_want_this" - assert f"{error}" == "Wrong type int" - - -def test_error_msg_template_override(): - class MyError(OsparcErrorMixin, Exception): - msg_template = "Wrong value {value}" - - error_override_msg = MyError(msg_template="I want this message") - assert str(error_override_msg) == "I want this message" - - error = MyError(value=42) - assert hasattr(error, "value") - assert str(error) == f"Wrong value {error.value}" - - -def test_error_msg_template_nicer_override(): - class MyError(OsparcErrorMixin, Exception): - msg_template = "Wrong value {value}" - - def __init__(self, msg=None, **ctx: Any) -> None: - super().__init__(**ctx) - # positional argument msg (if defined) overrides the msg_template - if msg: - self.msg_template = msg - - error_override_msg = MyError("I want this message") - assert str(error_override_msg) == "I want this message" - - error = MyError(value=42) - assert hasattr(error, "value") - assert str(error) == f"Wrong value {error.value}" - - -def test_error_with_constructor(): - class MyError(OsparcErrorMixin, ValueError): - msg_template = "Wrong value {value}" - - # handy e.g. autocompletion - def __init__(self, *, my_value: int = 42, **extra): - super().__init__(**extra) - self.value = my_value - - error = MyError(my_value=33, something_else="yes") - assert error.value == 33 - assert str(error) == "Wrong value 33" - assert not hasattr(error, "my_value") - - # the autocompletion does not see this - assert error.something_else == "yes" - - -@pytest.mark.parametrize( - "str_format,ctx,expected", - [ - pytest.param("{value:10}", {"value": "Python"}, "Python ", id="left-align"), - pytest.param( - "{value:>10}", {"value": "Python"}, " Python", id="right-align" - ), - pytest.param( - "{value:^10}", {"value": "Python"}, " Python ", id="center-align" - ), - pytest.param("{v:.2f}", {"v": 3.1415926}, "3.14", id="decimals"), - pytest.param( - "{dt:%Y-%m-%d %H:%M}", - {"dt": datetime(2020, 5, 17, 18, 45)}, - "2020-05-17 18:45", - id="datetime", - ), - ], -) -def test_msg_template_with_different_formats( - str_format: str, ctx: dict[str, Any], expected: str -): - class MyError(OsparcErrorMixin, ValueError): - msg_template = str_format - - error = MyError(**ctx) - assert str(error) == expected - - -def test_missing_keys_in_msg_template_does_not_raise(): - class MyError(OsparcErrorMixin, ValueError): - msg_template = "{value} and {missing}" - - assert str(MyError(value=42)) == "42 and 'missing=?'" From 4a30d09aa0ee42f89b7d8b76f192b5dd8bd2b73b Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 7 Oct 2024 13:50:30 +0200 Subject: [PATCH 220/280] fix rename --- packages/common-library/tests/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/common-library/tests/conftest.py b/packages/common-library/tests/conftest.py index 124e3c2b022..46f09f86b46 100644 --- a/packages/common-library/tests/conftest.py +++ b/packages/common-library/tests/conftest.py @@ -9,7 +9,7 @@ import pytest pytest_plugins = [ - "pytest_simcore.pydantic_common", + "pytest_simcore.pydantic_models", "pytest_simcore.pytest_global_environs", "pytest_simcore.repository_paths", 
"pytest_simcore.schemas", From 1e5722084ed54e1e2383f9652221409c8c1df0a2 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 7 Oct 2024 14:14:30 +0200 Subject: [PATCH 221/280] add utils --- .../utils/pydantic_fields_extension.py | 22 ++++++ .../src/common_library/utils/serialization.py | 32 ++++++++ .../tests/test_utils_pydantic_extension.py | 76 +++++++++++++++++++ .../tests/test_utils_serialization.py | 30 ++++++++ 4 files changed, 160 insertions(+) create mode 100644 packages/common-library/src/common_library/utils/pydantic_fields_extension.py create mode 100644 packages/common-library/src/common_library/utils/serialization.py create mode 100644 packages/common-library/tests/test_utils_pydantic_extension.py create mode 100644 packages/common-library/tests/test_utils_serialization.py diff --git a/packages/common-library/src/common_library/utils/pydantic_fields_extension.py b/packages/common-library/src/common_library/utils/pydantic_fields_extension.py new file mode 100644 index 00000000000..f2f6d59a5f4 --- /dev/null +++ b/packages/common-library/src/common_library/utils/pydantic_fields_extension.py @@ -0,0 +1,22 @@ +from types import UnionType +from typing import Any, Literal, get_args, get_origin + +from pydantic.fields import FieldInfo + + +def get_type(info: FieldInfo) -> Any: + field_type = info.annotation + if args := get_args(info.annotation): + field_type = next(a for a in args if a != type(None)) + return field_type + + +def is_literal(info: FieldInfo) -> bool: + return get_origin(info.annotation) is Literal + + +def is_nullable(info: FieldInfo) -> bool: + origin = get_origin(info.annotation) # X | None or Optional[X] will return Union + if origin is UnionType: + return any(x in get_args(info.annotation) for x in (type(None), Any)) + return False diff --git a/packages/common-library/src/common_library/utils/serialization.py b/packages/common-library/src/common_library/utils/serialization.py new file mode 100644 index 00000000000..91f41aad795 --- /dev/null +++ b/packages/common-library/src/common_library/utils/serialization.py @@ -0,0 +1,32 @@ +from typing import Any + +from common_library.utils.pydantic_fields_extension import get_type +from pydantic import BaseModel, SecretStr + + +def model_dump_with_secrets( + settings_obj: BaseModel, show_secrets: bool, **pydantic_export_options +) -> dict[str, Any]: + data = settings_obj.model_dump(**pydantic_export_options) + + for field_name in settings_obj.model_fields: + if field_name not in data: + continue + + field_data = data[field_name] + + if isinstance(field_data, SecretStr): + if show_secrets: + data[field_name] = field_data.get_secret_value() + else: + data[field_name] = str(field_data) + elif isinstance(field_data, dict): + field_type = get_type(settings_obj.model_fields[field_name]) + if issubclass(field_type, BaseModel): + data[field_name] = model_dump_with_secrets( + field_type.model_validate(field_data), + show_secrets, + **pydantic_export_options, + ) + + return data diff --git a/packages/common-library/tests/test_utils_pydantic_extension.py b/packages/common-library/tests/test_utils_pydantic_extension.py new file mode 100644 index 00000000000..72634f5762d --- /dev/null +++ b/packages/common-library/tests/test_utils_pydantic_extension.py @@ -0,0 +1,76 @@ +from typing import Literal + +import pytest +from common_library.utils.pydantic_fields_extension import ( + get_type, + is_literal, + is_nullable, +) +from pydantic import BaseModel, Field + + +class MyModel(BaseModel): + a: int + b: float | None = Field(...) 
+ c: str = "bla" + d: bool | None = None + e: Literal["bla"] + + +@pytest.mark.parametrize( + "fn,expected,name", + [ + ( + get_type, + int, + "a", + ), + ( + get_type, + float, + "b", + ), + ( + get_type, + str, + "c", + ), + (get_type, bool, "d"), + ( + is_literal, + False, + "a", + ), + ( + is_literal, + False, + "b", + ), + ( + is_literal, + False, + "c", + ), + (is_literal, False, "d"), + (is_literal, True, "e"), + ( + is_nullable, + False, + "a", + ), + ( + is_nullable, + True, + "b", + ), + ( + is_nullable, + False, + "c", + ), + (is_nullable, True, "d"), + (is_nullable, False, "e"), + ], +) +def test_field_fn(fn, expected, name): + assert expected == fn(MyModel.model_fields[name]) diff --git a/packages/common-library/tests/test_utils_serialization.py b/packages/common-library/tests/test_utils_serialization.py new file mode 100644 index 00000000000..3a55de6026b --- /dev/null +++ b/packages/common-library/tests/test_utils_serialization.py @@ -0,0 +1,30 @@ +from typing import Final + +import pytest +from models_library.utils.serialization import model_dump_with_secrets +from pydantic import BaseModel, SecretStr + + +class Credentials(BaseModel): + USERNAME: str | None = None + PASSWORD: SecretStr | None = None + + +ME: Final[Credentials] = Credentials(USERNAME="DeepThought", PASSWORD=SecretStr("42")) + + +@pytest.mark.parametrize( + "expected,show_secrets", + [ + ( + {"USERNAME": "DeepThought", "PASSWORD": "42"}, + True, + ), + ( + {"USERNAME": "DeepThought", "PASSWORD": "**********"}, + False, # hide secrets + ), + ], +) +def test_model_dump_with_secrets(expected: dict, show_secrets: bool): + assert expected == model_dump_with_secrets(ME, show_secrets=show_secrets) From cb7ac13a2185cda98e578d9578f98e7e9a844a4f Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 7 Oct 2024 14:49:55 +0200 Subject: [PATCH 222/280] add common modules --- .../{utils => }/pydantic_fields_extension.py | 0 .../pydantic_networks_extension.py | 5 +++++ .../{utils => }/serialization.py | 2 +- ...n.py => test_pydantic_fields_extension.py} | 6 +----- .../tests/test_pydantic_networks_extension.py | 20 +++++++++++++++++++ ...serialization.py => test_serialization.py} | 2 +- 6 files changed, 28 insertions(+), 7 deletions(-) rename packages/common-library/src/common_library/{utils => }/pydantic_fields_extension.py (100%) create mode 100644 packages/common-library/src/common_library/pydantic_networks_extension.py rename packages/common-library/src/common_library/{utils => }/serialization.py (93%) rename packages/common-library/tests/{test_utils_pydantic_extension.py => test_pydantic_fields_extension.py} (92%) create mode 100644 packages/common-library/tests/test_pydantic_networks_extension.py rename packages/common-library/tests/{test_utils_serialization.py => test_serialization.py} (90%) diff --git a/packages/common-library/src/common_library/utils/pydantic_fields_extension.py b/packages/common-library/src/common_library/pydantic_fields_extension.py similarity index 100% rename from packages/common-library/src/common_library/utils/pydantic_fields_extension.py rename to packages/common-library/src/common_library/pydantic_fields_extension.py diff --git a/packages/common-library/src/common_library/pydantic_networks_extension.py b/packages/common-library/src/common_library/pydantic_networks_extension.py new file mode 100644 index 00000000000..b53a2bfc8ae --- /dev/null +++ b/packages/common-library/src/common_library/pydantic_networks_extension.py @@ -0,0 +1,5 @@ +from typing import Annotated, TypeAlias +from 
pydantic import AfterValidator, AnyHttpUrl + + +AnyHttpUrlLegacy: TypeAlias = Annotated[str, AnyHttpUrl, AfterValidator(lambda u: u.rstrip("/"))] diff --git a/packages/common-library/src/common_library/utils/serialization.py b/packages/common-library/src/common_library/serialization.py similarity index 93% rename from packages/common-library/src/common_library/utils/serialization.py rename to packages/common-library/src/common_library/serialization.py index 91f41aad795..510bdf6a469 100644 --- a/packages/common-library/src/common_library/utils/serialization.py +++ b/packages/common-library/src/common_library/serialization.py @@ -1,6 +1,6 @@ from typing import Any -from common_library.utils.pydantic_fields_extension import get_type +from common_library.pydantic_fields_extension import get_type from pydantic import BaseModel, SecretStr diff --git a/packages/common-library/tests/test_utils_pydantic_extension.py b/packages/common-library/tests/test_pydantic_fields_extension.py similarity index 92% rename from packages/common-library/tests/test_utils_pydantic_extension.py rename to packages/common-library/tests/test_pydantic_fields_extension.py index 72634f5762d..50ff5443c41 100644 --- a/packages/common-library/tests/test_utils_pydantic_extension.py +++ b/packages/common-library/tests/test_pydantic_fields_extension.py @@ -1,11 +1,7 @@ from typing import Literal import pytest -from common_library.utils.pydantic_fields_extension import ( - get_type, - is_literal, - is_nullable, -) +from common_library.pydantic_fields_extension import get_type, is_literal, is_nullable from pydantic import BaseModel, Field diff --git a/packages/common-library/tests/test_pydantic_networks_extension.py b/packages/common-library/tests/test_pydantic_networks_extension.py new file mode 100644 index 00000000000..3390f7c2acf --- /dev/null +++ b/packages/common-library/tests/test_pydantic_networks_extension.py @@ -0,0 +1,20 @@ +from common_library.pydantic_networks_extension import AnyHttpUrlLegacy +from pydantic import AnyHttpUrl, TypeAdapter +from pydantic_core import Url + + +def test_any_http_url(): + url = TypeAdapter(AnyHttpUrl).validate_python( + "http://backgroud.testserver.io", + ) + + assert isinstance(url, Url) + assert f"{url}" == "http://backgroud.testserver.io/" # NOTE: trailing '/' added in Pydantic v2 + +def test_any_http_url_legacy(): + url = TypeAdapter(AnyHttpUrlLegacy).validate_python( + "http://backgroud.testserver.io", + ) + + assert isinstance(url, str) + assert url == "http://backgroud.testserver.io" diff --git a/packages/common-library/tests/test_utils_serialization.py b/packages/common-library/tests/test_serialization.py similarity index 90% rename from packages/common-library/tests/test_utils_serialization.py rename to packages/common-library/tests/test_serialization.py index 3a55de6026b..d53db58809c 100644 --- a/packages/common-library/tests/test_utils_serialization.py +++ b/packages/common-library/tests/test_serialization.py @@ -1,7 +1,7 @@ from typing import Final import pytest -from models_library.utils.serialization import model_dump_with_secrets +from common_library.serialization import model_dump_with_secrets from pydantic import BaseModel, SecretStr From 2663ed13288e28da88129596523b888c22abc98f Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 7 Oct 2024 14:56:20 +0200 Subject: [PATCH 223/280] update requirements --- packages/settings-library/requirements/ci.txt | 2 +- packages/settings-library/requirements/dev.txt | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff 
--git a/packages/settings-library/requirements/ci.txt b/packages/settings-library/requirements/ci.txt index d508c9f3352..30b410dfbb7 100644 --- a/packages/settings-library/requirements/ci.txt +++ b/packages/settings-library/requirements/ci.txt @@ -12,7 +12,7 @@ # installs this repo's packages pytest-simcore @ ../pytest-simcore -simcore-models-library @ ../models-library +simcore-common-library @ ../common-library # current module simcore-settings-library @ . diff --git a/packages/settings-library/requirements/dev.txt b/packages/settings-library/requirements/dev.txt index 0b760b84076..ed4b1395d47 100644 --- a/packages/settings-library/requirements/dev.txt +++ b/packages/settings-library/requirements/dev.txt @@ -1,4 +1,4 @@ -# Shortcut to install all packages needed to develop 'models-library' +# Shortcut to install all packages needed to develop 'settings-library' # # - As ci.txt but with current and repo packages in develop (edit) mode # @@ -13,7 +13,7 @@ # installs this repo's packages --editable ../pytest-simcore/ ---editable ../models-library +--editable ../common-library # current module --editable . From e358cd25866c3e1bd73cd562d78f39ccb55d1864 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 7 Oct 2024 15:01:56 +0200 Subject: [PATCH 224/280] upgrade package name --- packages/aws-library/src/aws_library/ec2/_errors.py | 2 +- packages/aws-library/src/aws_library/s3/_errors.py | 2 +- packages/aws-library/src/aws_library/ssm/_errors.py | 2 +- .../src/dask_task_models_library/container_tasks/errors.py | 2 +- .../src/simcore_postgres_database/utils_folders.py | 2 +- .../src/simcore_postgres_database/utils_projects_metadata.py | 2 +- .../src/simcore_postgres_database/utils_projects_nodes.py | 2 +- packages/service-integration/src/service_integration/errors.py | 2 +- packages/service-library/src/servicelib/background_task.py | 2 +- packages/service-library/src/servicelib/fastapi/errors.py | 2 +- .../service-library/src/servicelib/fastapi/http_client_thin.py | 2 +- .../src/servicelib/long_running_tasks/_errors.py | 2 +- packages/service-library/src/servicelib/rabbitmq/_errors.py | 2 +- packages/service-library/src/servicelib/redis.py | 2 +- .../simcore-sdk/src/simcore_sdk/node_ports_common/aws_s3_cli.py | 2 +- .../simcore-sdk/src/simcore_sdk/node_ports_common/r_clone.py | 2 +- .../src/simcore_sdk/node_ports_v2/port_validation.py | 2 +- 17 files changed, 17 insertions(+), 17 deletions(-) diff --git a/packages/aws-library/src/aws_library/ec2/_errors.py b/packages/aws-library/src/aws_library/ec2/_errors.py index 1c2a5c9af1f..4fb0e611ed2 100644 --- a/packages/aws-library/src/aws_library/ec2/_errors.py +++ b/packages/aws-library/src/aws_library/ec2/_errors.py @@ -1,5 +1,5 @@ # pylint: disable=too-many-ancestors -from models_library.errors_classes import OsparcErrorMixin +from common_library.errors_classes import OsparcErrorMixin class EC2BaseError(OsparcErrorMixin, Exception): diff --git a/packages/aws-library/src/aws_library/s3/_errors.py b/packages/aws-library/src/aws_library/s3/_errors.py index d3202822095..d14105dbd30 100644 --- a/packages/aws-library/src/aws_library/s3/_errors.py +++ b/packages/aws-library/src/aws_library/s3/_errors.py @@ -1,4 +1,4 @@ -from models_library.errors_classes import OsparcErrorMixin +from common_library.errors_classes import OsparcErrorMixin class S3RuntimeError(OsparcErrorMixin, RuntimeError): diff --git a/packages/aws-library/src/aws_library/ssm/_errors.py b/packages/aws-library/src/aws_library/ssm/_errors.py index 73f050d8d2c..5d3ea16b6c6 100644 --- 
a/packages/aws-library/src/aws_library/ssm/_errors.py +++ b/packages/aws-library/src/aws_library/ssm/_errors.py @@ -1,4 +1,4 @@ -from models_library.errors_classes import OsparcErrorMixin +from common_library.errors_classes import OsparcErrorMixin class SSMRuntimeError(OsparcErrorMixin, RuntimeError): diff --git a/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/errors.py b/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/errors.py index c94f2af10cb..f0a6813ba15 100644 --- a/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/errors.py +++ b/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/errors.py @@ -1,7 +1,7 @@ """ Dask task exceptions """ -from models_library.errors_classes import OsparcErrorMixin +from common_library.errors_classes import OsparcErrorMixin class TaskValueError(OsparcErrorMixin, ValueError): diff --git a/packages/postgres-database/src/simcore_postgres_database/utils_folders.py b/packages/postgres-database/src/simcore_postgres_database/utils_folders.py index 1abb93a1f5b..6dcca321a73 100644 --- a/packages/postgres-database/src/simcore_postgres_database/utils_folders.py +++ b/packages/postgres-database/src/simcore_postgres_database/utils_folders.py @@ -10,7 +10,7 @@ import sqlalchemy as sa from aiopg.sa.connection import SAConnection from aiopg.sa.result import RowProxy -from models_library.errors_classes import OsparcErrorMixin +from common_library.errors_classes import OsparcErrorMixin from pydantic import ( BaseModel, ConfigDict, diff --git a/packages/postgres-database/src/simcore_postgres_database/utils_projects_metadata.py b/packages/postgres-database/src/simcore_postgres_database/utils_projects_metadata.py index 02e08098d1a..c8aa9962d43 100644 --- a/packages/postgres-database/src/simcore_postgres_database/utils_projects_metadata.py +++ b/packages/postgres-database/src/simcore_postgres_database/utils_projects_metadata.py @@ -5,7 +5,7 @@ import sqlalchemy as sa from aiopg.sa.connection import SAConnection from aiopg.sa.result import ResultProxy, RowProxy -from models_library.errors_classes import OsparcErrorMixin +from common_library.errors_classes import OsparcErrorMixin from pydantic import BaseModel, ConfigDict from sqlalchemy.dialects.postgresql import insert as pg_insert diff --git a/packages/postgres-database/src/simcore_postgres_database/utils_projects_nodes.py b/packages/postgres-database/src/simcore_postgres_database/utils_projects_nodes.py index 413407f254f..cb47141b1ab 100644 --- a/packages/postgres-database/src/simcore_postgres_database/utils_projects_nodes.py +++ b/packages/postgres-database/src/simcore_postgres_database/utils_projects_nodes.py @@ -5,7 +5,7 @@ import sqlalchemy from aiopg.sa.connection import SAConnection -from models_library.errors_classes import OsparcErrorMixin +from common_library.errors_classes import OsparcErrorMixin from pydantic import BaseModel, ConfigDict, Field from sqlalchemy.dialects.postgresql import insert as pg_insert diff --git a/packages/service-integration/src/service_integration/errors.py b/packages/service-integration/src/service_integration/errors.py index f1b5485092c..65521d36371 100644 --- a/packages/service-integration/src/service_integration/errors.py +++ b/packages/service-integration/src/service_integration/errors.py @@ -1,4 +1,4 @@ -from models_library.errors_classes import OsparcErrorMixin +from common_library.errors_classes import OsparcErrorMixin class 
ServiceIntegrationError(OsparcErrorMixin, RuntimeError):
diff --git a/packages/service-library/src/servicelib/background_task.py b/packages/service-library/src/servicelib/background_task.py
index 26f0dd25050..b1eba9bc54b 100644
--- a/packages/service-library/src/servicelib/background_task.py
+++ b/packages/service-library/src/servicelib/background_task.py
@@ -5,7 +5,7 @@
 from collections.abc import AsyncIterator, Awaitable, Callable
 from typing import Final
-from models_library.errors_classes import OsparcErrorMixin
+from common_library.errors_classes import OsparcErrorMixin
 from tenacity import TryAgain
 from tenacity.asyncio import AsyncRetrying
 from tenacity.stop import stop_after_attempt
diff --git a/packages/service-library/src/servicelib/fastapi/errors.py b/packages/service-library/src/servicelib/fastapi/errors.py
index 136bb7c2fd6..139ed573fbe 100644
--- a/packages/service-library/src/servicelib/fastapi/errors.py
+++ b/packages/service-library/src/servicelib/fastapi/errors.py
@@ -1,4 +1,4 @@
-from models_library.errors_classes import OsparcErrorMixin
+from common_library.errors_classes import OsparcErrorMixin
 class ApplicationRuntimeError(OsparcErrorMixin, RuntimeError):
diff --git a/packages/service-library/src/servicelib/fastapi/http_client_thin.py b/packages/service-library/src/servicelib/fastapi/http_client_thin.py
index 55daaef69ba..c113321a488 100644
--- a/packages/service-library/src/servicelib/fastapi/http_client_thin.py
+++ b/packages/service-library/src/servicelib/fastapi/http_client_thin.py
@@ -7,7 +7,7 @@
 from httpx import AsyncClient, ConnectError, HTTPError, PoolTimeout, Response
 from httpx._types import TimeoutTypes, URLTypes
-from models_library.errors_classes import OsparcErrorMixin
+from common_library.errors_classes import OsparcErrorMixin
 from tenacity import RetryCallState
 from tenacity.asyncio import AsyncRetrying
 from tenacity.before_sleep import before_sleep_log
diff --git a/packages/service-library/src/servicelib/long_running_tasks/_errors.py b/packages/service-library/src/servicelib/long_running_tasks/_errors.py
index 43fd5b8c6be..44dc03157f2 100644
--- a/packages/service-library/src/servicelib/long_running_tasks/_errors.py
+++ b/packages/service-library/src/servicelib/long_running_tasks/_errors.py
@@ -1,4 +1,4 @@
-from models_library.errors_classes import OsparcErrorMixin
+from common_library.errors_classes import OsparcErrorMixin
 class BaseLongRunningError(OsparcErrorMixin, Exception):
diff --git a/packages/service-library/src/servicelib/rabbitmq/_errors.py b/packages/service-library/src/servicelib/rabbitmq/_errors.py
index 93dbe0cd051..49d74975587 100644
--- a/packages/service-library/src/servicelib/rabbitmq/_errors.py
+++ b/packages/service-library/src/servicelib/rabbitmq/_errors.py
@@ -1,6 +1,6 @@
 from typing import Final
-from models_library.errors_classes import OsparcErrorMixin
+from common_library.errors_classes import OsparcErrorMixin
 _ERROR_PREFIX: Final[str] = "rabbitmq_error"
diff --git a/packages/service-library/src/servicelib/redis.py b/packages/service-library/src/servicelib/redis.py
index 7bbee359e9d..fce89d7790e 100644
--- a/packages/service-library/src/servicelib/redis.py
+++ b/packages/service-library/src/servicelib/redis.py
@@ -10,7 +10,7 @@
 import redis.asyncio as aioredis
 import redis.exceptions
-from models_library.errors_classes import OsparcErrorMixin
+from common_library.errors_classes import OsparcErrorMixin
 from pydantic import NonNegativeFloat, NonNegativeInt
 from redis.asyncio.lock import Lock
 from redis.asyncio.retry import Retry
diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/aws_s3_cli.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/aws_s3_cli.py
index 87264ced3da..320cfd7e25f 100644
--- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/aws_s3_cli.py
+++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/aws_s3_cli.py
@@ -6,7 +6,7 @@
 from asyncio.streams import StreamReader
 from pathlib import Path
-from models_library.errors_classes import OsparcErrorMixin
+from common_library.errors_classes import OsparcErrorMixin
 from aiocache import cached  # type: ignore[import-untyped]
 from models_library.basic_types import IDStr
diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone.py
index 659e42d4c0a..bbfe14e7f39 100644
--- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone.py
+++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone.py
@@ -8,7 +8,7 @@
 from pathlib import Path
 from typing import Final
-from models_library.errors_classes import OsparcErrorMixin
+from common_library.errors_classes import OsparcErrorMixin
 from aiocache import cached  # type: ignore[import-untyped]
 from aiofiles import tempfile
diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port_validation.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port_validation.py
index 2c0230be5fd..b33e677c0bf 100644
--- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port_validation.py
+++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port_validation.py
@@ -2,7 +2,7 @@
 import re
 from typing import Any
-from models_library.errors_classes import OsparcErrorMixin
+from common_library.errors_classes import OsparcErrorMixin
 from models_library.projects_nodes import UnitStr
 from models_library.utils.json_schema import (
     JsonSchemaValidationError,

From 0b8a6abdf2de765674ebec26bc635460d29815d8 Mon Sep 17 00:00:00 2001
From: Giancarlo Romeo
Date: Mon, 7 Oct 2024 15:06:18 +0200
Subject: [PATCH 225/280] remove moved modules

---
 .../utils/pydantic_fields_extension.py        | 22 ------
 .../src/models_library/utils/serialization.py | 32 --------
 .../tests/test_utils_pydantic_extension.py    | 76 -------------------
 .../tests/test_utils_serialization.py         | 30 --------
 4 files changed, 160 deletions(-)
 delete mode 100644 packages/models-library/src/models_library/utils/pydantic_fields_extension.py
 delete mode 100644 packages/models-library/src/models_library/utils/serialization.py
 delete mode 100644 packages/models-library/tests/test_utils_pydantic_extension.py
 delete mode 100644 packages/models-library/tests/test_utils_serialization.py

diff --git a/packages/models-library/src/models_library/utils/pydantic_fields_extension.py b/packages/models-library/src/models_library/utils/pydantic_fields_extension.py
deleted file mode 100644
index f2f6d59a5f4..00000000000
--- a/packages/models-library/src/models_library/utils/pydantic_fields_extension.py
+++ /dev/null
@@ -1,22 +0,0 @@
-from types import UnionType
-from typing import Any, Literal, get_args, get_origin
-
-from pydantic.fields import FieldInfo
-
-
-def get_type(info: FieldInfo) -> Any:
-    field_type = info.annotation
-    if args := get_args(info.annotation):
-        field_type = next(a for a in args if a != type(None))
-    return field_type
-
-
-def is_literal(info: FieldInfo) -> bool:
-    return get_origin(info.annotation) is Literal
-
-
-def is_nullable(info: FieldInfo) -> bool:
-    origin = get_origin(info.annotation)  # X | None or Optional[X] will return Union
-    if origin is UnionType:
-        return any(x in get_args(info.annotation) for x in (type(None), Any))
-    return False
diff --git a/packages/models-library/src/models_library/utils/serialization.py b/packages/models-library/src/models_library/utils/serialization.py
deleted file mode 100644
index aae386a88a9..00000000000
--- a/packages/models-library/src/models_library/utils/serialization.py
+++ /dev/null
@@ -1,32 +0,0 @@
-from typing import Any
-
-from models_library.utils.pydantic_fields_extension import get_type
-from pydantic import BaseModel, SecretStr
-
-
-def model_dump_with_secrets(
-    settings_obj: BaseModel, show_secrets: bool, **pydantic_export_options
-) -> dict[str, Any]:
-    data = settings_obj.model_dump(**pydantic_export_options)
-
-    for field_name in settings_obj.model_fields:
-        if field_name not in data:
-            continue
-
-        field_data = data[field_name]
-
-        if isinstance(field_data, SecretStr):
-            if show_secrets:
-                data[field_name] = field_data.get_secret_value()
-            else:
-                data[field_name] = str(field_data)
-        elif isinstance(field_data, dict):
-            field_type = get_type(settings_obj.model_fields[field_name])
-            if issubclass(field_type, BaseModel):
-                data[field_name] = model_dump_with_secrets(
-                    field_type.model_validate(field_data),
-                    show_secrets,
-                    **pydantic_export_options,
-                )
-
-    return data
diff --git a/packages/models-library/tests/test_utils_pydantic_extension.py b/packages/models-library/tests/test_utils_pydantic_extension.py
deleted file mode 100644
index 390874fc995..00000000000
--- a/packages/models-library/tests/test_utils_pydantic_extension.py
+++ /dev/null
@@ -1,76 +0,0 @@
-from typing import Literal
-
-import pytest
-from models_library.utils.pydantic_fields_extension import (
-    get_type,
-    is_literal,
-    is_nullable,
-)
-from pydantic import BaseModel, Field
-
-
-class MyModel(BaseModel):
-    a: int
-    b: float | None = Field(...)
- c: str = "bla" - d: bool | None = None - e: Literal["bla"] - - -@pytest.mark.parametrize( - "fn,expected,name", - [ - ( - get_type, - int, - "a", - ), - ( - get_type, - float, - "b", - ), - ( - get_type, - str, - "c", - ), - (get_type, bool, "d"), - ( - is_literal, - False, - "a", - ), - ( - is_literal, - False, - "b", - ), - ( - is_literal, - False, - "c", - ), - (is_literal, False, "d"), - (is_literal, True, "e"), - ( - is_nullable, - False, - "a", - ), - ( - is_nullable, - True, - "b", - ), - ( - is_nullable, - False, - "c", - ), - (is_nullable, True, "d"), - (is_nullable, False, "e"), - ], -) -def test_field_fn(fn, expected, name): - assert expected == fn(MyModel.model_fields[name]) diff --git a/packages/models-library/tests/test_utils_serialization.py b/packages/models-library/tests/test_utils_serialization.py deleted file mode 100644 index 3a55de6026b..00000000000 --- a/packages/models-library/tests/test_utils_serialization.py +++ /dev/null @@ -1,30 +0,0 @@ -from typing import Final - -import pytest -from models_library.utils.serialization import model_dump_with_secrets -from pydantic import BaseModel, SecretStr - - -class Credentials(BaseModel): - USERNAME: str | None = None - PASSWORD: SecretStr | None = None - - -ME: Final[Credentials] = Credentials(USERNAME="DeepThought", PASSWORD=SecretStr("42")) - - -@pytest.mark.parametrize( - "expected,show_secrets", - [ - ( - {"USERNAME": "DeepThought", "PASSWORD": "42"}, - True, - ), - ( - {"USERNAME": "DeepThought", "PASSWORD": "**********"}, - False, # hide secrets - ), - ], -) -def test_model_dump_with_secrets(expected: dict, show_secrets: bool): - assert expected == model_dump_with_secrets(ME, show_secrets=show_secrets) From 2b4e79dec241417e9fcc709d80cb0a97831c0b2e Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 7 Oct 2024 15:08:57 +0200 Subject: [PATCH 226/280] update requirements --- packages/models-library/requirements/ci.txt | 1 + packages/models-library/requirements/dev.txt | 1 + packages/settings-library/requirements/ci.txt | 2 +- packages/settings-library/requirements/dev.txt | 2 +- 4 files changed, 4 insertions(+), 2 deletions(-) diff --git a/packages/models-library/requirements/ci.txt b/packages/models-library/requirements/ci.txt index e8e9fd2aeab..caaa0bb1b31 100644 --- a/packages/models-library/requirements/ci.txt +++ b/packages/models-library/requirements/ci.txt @@ -11,6 +11,7 @@ --requirement _test.txt # installs this repo's packages +simcore-common-library @ ../common-library simcore-postgres-database[migration] @ ../postgres-database/ pytest-simcore @ ../pytest-simcore diff --git a/packages/models-library/requirements/dev.txt b/packages/models-library/requirements/dev.txt index 901530f3644..bda46a51d85 100644 --- a/packages/models-library/requirements/dev.txt +++ b/packages/models-library/requirements/dev.txt @@ -12,6 +12,7 @@ --requirement _tools.txt # installs this repo's packages +--editable ../common-library --editable ../postgres-database/[migration] --editable ../pytest-simcore/ diff --git a/packages/settings-library/requirements/ci.txt b/packages/settings-library/requirements/ci.txt index 30b410dfbb7..a3a99f806c7 100644 --- a/packages/settings-library/requirements/ci.txt +++ b/packages/settings-library/requirements/ci.txt @@ -11,8 +11,8 @@ --requirement _test.txt # installs this repo's packages -pytest-simcore @ ../pytest-simcore simcore-common-library @ ../common-library +pytest-simcore @ ../pytest-simcore # current module simcore-settings-library @ . 
diff --git a/packages/settings-library/requirements/dev.txt b/packages/settings-library/requirements/dev.txt index ed4b1395d47..3ad730e95d1 100644 --- a/packages/settings-library/requirements/dev.txt +++ b/packages/settings-library/requirements/dev.txt @@ -12,8 +12,8 @@ --requirement _tools.txt # installs this repo's packages ---editable ../pytest-simcore/ --editable ../common-library +--editable ../pytest-simcore/ # current module --editable . From e6efb6d253febd556d729055568d32c29b1ab1ce Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 7 Oct 2024 15:10:19 +0200 Subject: [PATCH 227/280] update requirements --- packages/notifications-library/requirements/ci.txt | 1 + packages/notifications-library/requirements/dev.txt | 1 + 2 files changed, 2 insertions(+) diff --git a/packages/notifications-library/requirements/ci.txt b/packages/notifications-library/requirements/ci.txt index 2ac189e1a32..3432db2c649 100644 --- a/packages/notifications-library/requirements/ci.txt +++ b/packages/notifications-library/requirements/ci.txt @@ -11,6 +11,7 @@ --requirement _test.txt # installs this repo's packages +simcore-common-library @ ../common-library/ simcore-models-library @ ../models-library/ simcore-postgres-database @ ../postgres-database/ pytest-simcore @ ../pytest-simcore/ diff --git a/packages/notifications-library/requirements/dev.txt b/packages/notifications-library/requirements/dev.txt index 723de763080..4a19b46f46c 100644 --- a/packages/notifications-library/requirements/dev.txt +++ b/packages/notifications-library/requirements/dev.txt @@ -12,6 +12,7 @@ --requirement _tools.txt # installs this repo's packages +--editable ../common-library --editable ../models-library/ --editable ../postgres-database/ --editable ../pytest-simcore/ From 51c6ca5aceda33e01c83f495ae45296d13b0253f Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 7 Oct 2024 15:16:31 +0200 Subject: [PATCH 228/280] update requirements --- packages/service-library/requirements/ci.txt | 1 + packages/service-library/requirements/ci[aiohttp].txt | 1 + packages/service-library/requirements/ci[all].txt | 1 + packages/service-library/requirements/ci[fastapi].txt | 1 + packages/service-library/requirements/dev.txt | 1 + packages/service-library/requirements/dev[aiohttp].txt | 1 + packages/service-library/requirements/dev[all].txt | 1 + packages/service-library/requirements/dev[fastapi].txt | 1 + packages/service-library/src/servicelib/rabbitmq/_errors.py | 4 ++-- .../src/servicelib/rabbitmq/rpc_interfaces/catalog/errors.py | 5 +---- 10 files changed, 11 insertions(+), 6 deletions(-) diff --git a/packages/service-library/requirements/ci.txt b/packages/service-library/requirements/ci.txt index 4d4dea58c9f..b26845819f4 100644 --- a/packages/service-library/requirements/ci.txt +++ b/packages/service-library/requirements/ci.txt @@ -11,6 +11,7 @@ --requirement _test.txt # installs this repo's packages +simcore-common-library @ ../common-library simcore-models-library @ ../models-library simcore-settings-library @ ../settings-library/ pytest-simcore @ ../pytest-simcore diff --git a/packages/service-library/requirements/ci[aiohttp].txt b/packages/service-library/requirements/ci[aiohttp].txt index 721950755b5..ee41e3b69a6 100644 --- a/packages/service-library/requirements/ci[aiohttp].txt +++ b/packages/service-library/requirements/ci[aiohttp].txt @@ -12,6 +12,7 @@ --requirement _test.txt # installs this repo's packages +simcore-common-library @ ../common-library simcore-models-library @ ../models-library simcore-settings-library @ 
../settings-library/ pytest-simcore @ ../pytest-simcore diff --git a/packages/service-library/requirements/ci[all].txt b/packages/service-library/requirements/ci[all].txt index f7610e97111..f43ee95908f 100644 --- a/packages/service-library/requirements/ci[all].txt +++ b/packages/service-library/requirements/ci[all].txt @@ -13,6 +13,7 @@ --requirement _test.txt # installs this repo's packages +simcore-common-library @ ../common-library simcore-models-library @ ../models-library simcore-settings-library @ ../settings-library/ pytest-simcore @ ../pytest-simcore diff --git a/packages/service-library/requirements/ci[fastapi].txt b/packages/service-library/requirements/ci[fastapi].txt index c2c337fd4c0..db051f4ef73 100644 --- a/packages/service-library/requirements/ci[fastapi].txt +++ b/packages/service-library/requirements/ci[fastapi].txt @@ -12,6 +12,7 @@ --requirement _test.txt # installs this repo's packages +simcore-common-library @ ../common-library simcore-models-library @ ../models-library simcore-settings-library @ ../settings-library/ pytest-simcore @ ../pytest-simcore diff --git a/packages/service-library/requirements/dev.txt b/packages/service-library/requirements/dev.txt index b4da8c10382..f814830c46b 100644 --- a/packages/service-library/requirements/dev.txt +++ b/packages/service-library/requirements/dev.txt @@ -12,6 +12,7 @@ --requirement _tools.txt # installs this repo's packages +--editable ../common-library --editable ../models-library --editable ../settings-library --editable ../pytest-simcore diff --git a/packages/service-library/requirements/dev[aiohttp].txt b/packages/service-library/requirements/dev[aiohttp].txt index 5e0ae847c64..87748e35d29 100644 --- a/packages/service-library/requirements/dev[aiohttp].txt +++ b/packages/service-library/requirements/dev[aiohttp].txt @@ -13,6 +13,7 @@ --requirement _tools.txt # installs this repo's packages +--editable ../common-library --editable ../models-library/ --editable ../settings-library/ --editable ../pytest-simcore/ diff --git a/packages/service-library/requirements/dev[all].txt b/packages/service-library/requirements/dev[all].txt index b372254b325..8b23b6105c5 100644 --- a/packages/service-library/requirements/dev[all].txt +++ b/packages/service-library/requirements/dev[all].txt @@ -14,6 +14,7 @@ --requirement _tools.txt # installs this repo's packages +--editable ../common-library --editable ../models-library/ --editable ../settings-library/ --editable ../pytest-simcore/ diff --git a/packages/service-library/requirements/dev[fastapi].txt b/packages/service-library/requirements/dev[fastapi].txt index caea1c80fd5..d66370d7904 100644 --- a/packages/service-library/requirements/dev[fastapi].txt +++ b/packages/service-library/requirements/dev[fastapi].txt @@ -13,6 +13,7 @@ --requirement _tools.txt # installs this repo's packages +--editable ../common-library --editable ../models-library/ --editable ../settings-library/ --editable ../pytest-simcore/ diff --git a/packages/service-library/src/servicelib/rabbitmq/_errors.py b/packages/service-library/src/servicelib/rabbitmq/_errors.py index 49d74975587..105345efe9b 100644 --- a/packages/service-library/src/servicelib/rabbitmq/_errors.py +++ b/packages/service-library/src/servicelib/rabbitmq/_errors.py @@ -10,12 +10,12 @@ class BaseRPCError(OsparcErrorMixin, RuntimeError): class RPCNotInitializedError(BaseRPCError): - code = f"{_ERROR_PREFIX}.not_started" # type: ignore[assignment] + code = f"{_ERROR_PREFIX}.not_started" msg_template = "Please check that the RabbitMQ RPC backend was 
initialized!" class RemoteMethodNotRegisteredError(BaseRPCError): - code = f"{_ERROR_PREFIX}.remote_not_registered" # type: ignore[assignment] + code = f"{_ERROR_PREFIX}.remote_not_registered" msg_template = ( "Could not find a remote method named: '{method_name}'. " "Message from remote server was returned: {incoming_message}. " diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/catalog/errors.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/catalog/errors.py index 5837beddcae..d278bb350ba 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/catalog/errors.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/catalog/errors.py @@ -1,11 +1,8 @@ -from typing import Any - from common_library.errors_classes import OsparcErrorMixin class CatalogApiBaseError(OsparcErrorMixin, Exception): - def __init__(self, **ctx: Any) -> None: - super().__init__(**ctx) + pass class CatalogItemNotFoundError(CatalogApiBaseError): From 4265fec4cb0b46e4957ef1851424239584203b7a Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 7 Oct 2024 15:20:11 +0200 Subject: [PATCH 229/280] update package name --- packages/settings-library/src/settings_library/base.py | 2 +- packages/settings-library/tests/test__pydantic_settings.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/settings-library/src/settings_library/base.py b/packages/settings-library/src/settings_library/base.py index 97b50bd6e58..6e7bd0d7b3b 100644 --- a/packages/settings-library/src/settings_library/base.py +++ b/packages/settings-library/src/settings_library/base.py @@ -2,7 +2,7 @@ from functools import cached_property from typing import Any, Final, get_origin -from models_library.utils.pydantic_fields_extension import ( +from common_library.utils.pydantic_fields_extension import ( get_type, is_literal, is_nullable, diff --git a/packages/settings-library/tests/test__pydantic_settings.py b/packages/settings-library/tests/test__pydantic_settings.py index 3beeecc6c74..d42b5ad8586 100644 --- a/packages/settings-library/tests/test__pydantic_settings.py +++ b/packages/settings-library/tests/test__pydantic_settings.py @@ -15,7 +15,7 @@ from pydantic import ValidationInfo, field_validator from pydantic.fields import PydanticUndefined from pydantic_settings import BaseSettings -from models_library.utils.pydantic_fields_extension import is_nullable +from common_library.utils.pydantic_fields_extension import is_nullable def assert_field_specs( From 5a998a063d90ea36c95fc05e963da1c6f82f52b0 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 7 Oct 2024 15:38:45 +0200 Subject: [PATCH 230/280] add requirement --- packages/settings-library/requirements/_base.in | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/settings-library/requirements/_base.in b/packages/settings-library/requirements/_base.in index 01734738bcb..d94f4ee280f 100644 --- a/packages/settings-library/requirements/_base.in +++ b/packages/settings-library/requirements/_base.in @@ -2,6 +2,7 @@ # Specifies third-party dependencies for 'models-library' # --constraint ../../../requirements/constraints.txt +--requirement ../../../packages/common-library/requirements/_base.in pydantic pydantic-settings From fc91024a74c61349f1a0393ec2824c8739aeb8d8 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 7 Oct 2024 15:39:33 +0200 Subject: [PATCH 231/280] add requirement --- packages/service-library/requirements/_base.in | 1 + 1 file changed, 1 insertion(+) diff --git 
a/packages/service-library/requirements/_base.in b/packages/service-library/requirements/_base.in index b806af36443..e0e928cee52 100644 --- a/packages/service-library/requirements/_base.in +++ b/packages/service-library/requirements/_base.in @@ -5,6 +5,7 @@ --constraint ./constraints.txt # intra-repo required dependencies +--requirement ../../../packages/common-library/requirements/_base.in --requirement ../../../packages/models-library/requirements/_base.in --requirement ../../../packages/settings-library/requirements/_base.in From 0ead0b4ec2a4692b3572929b52125c4ef3fffabf Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 7 Oct 2024 15:44:56 +0200 Subject: [PATCH 232/280] update requirements --- packages/models-library/requirements/_base.in | 1 + packages/models-library/requirements/ci.txt | 1 + packages/models-library/requirements/dev.txt | 1 + 3 files changed, 3 insertions(+) diff --git a/packages/models-library/requirements/_base.in b/packages/models-library/requirements/_base.in index e2f8eae097a..b33d20bdd6b 100644 --- a/packages/models-library/requirements/_base.in +++ b/packages/models-library/requirements/_base.in @@ -2,6 +2,7 @@ # Specifies third-party dependencies for 'models-library' # --constraint ../../../requirements/constraints.txt +--requirement ../../../packages/common-library/requirements/_base.in arrow jsonschema diff --git a/packages/models-library/requirements/ci.txt b/packages/models-library/requirements/ci.txt index e8e9fd2aeab..caaa0bb1b31 100644 --- a/packages/models-library/requirements/ci.txt +++ b/packages/models-library/requirements/ci.txt @@ -11,6 +11,7 @@ --requirement _test.txt # installs this repo's packages +simcore-common-library @ ../common-library simcore-postgres-database[migration] @ ../postgres-database/ pytest-simcore @ ../pytest-simcore diff --git a/packages/models-library/requirements/dev.txt b/packages/models-library/requirements/dev.txt index 901530f3644..e8372a6f3f6 100644 --- a/packages/models-library/requirements/dev.txt +++ b/packages/models-library/requirements/dev.txt @@ -12,6 +12,7 @@ --requirement _tools.txt # installs this repo's packages +--editable ../common-library/ --editable ../postgres-database/[migration] --editable ../pytest-simcore/ From 727c6349575935305d2786bbb7bcaa99c15a835a Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 7 Oct 2024 15:49:06 +0200 Subject: [PATCH 233/280] add py.typed --- packages/common-library/src/common_library/py.typed | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 packages/common-library/src/common_library/py.typed diff --git a/packages/common-library/src/common_library/py.typed b/packages/common-library/src/common_library/py.typed new file mode 100644 index 00000000000..e69de29bb2d From f387ebb1b979c8e6c4fa2aac3685a57c903dd713 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 7 Oct 2024 15:54:19 +0200 Subject: [PATCH 234/280] add me as codeowner --- .github/CODEOWNERS | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 89701508fbf..faa7597a371 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -13,6 +13,7 @@ Makefile @pcrespov @sanderegg /api/ @sanderegg @pcrespov @matusdrobuliak66 /ci/ @sanderegg @pcrespov /docs/ @pcrespov +/packages/common-library/ @giancarloromeo /packages/models-library/ @sanderegg @pcrespov @matusdrobuliak66 /packages/postgres-database/ @matusdrobuliak66 /packages/pytest-simcore/ @pcrespov @sanderegg From 5ed512a04cc659cc4378d7dcb7afcf49d0fb0a56 Mon Sep 17 00:00:00 2001 From: 
Giancarlo Romeo Date: Mon, 7 Oct 2024 16:06:05 +0200 Subject: [PATCH 235/280] update requirements --- packages/service-library/requirements/_base.in | 1 + packages/service-library/requirements/ci.txt | 1 + packages/service-library/requirements/ci[aiohttp].txt | 1 + packages/service-library/requirements/ci[all].txt | 1 + packages/service-library/requirements/ci[fastapi].txt | 1 + packages/service-library/requirements/dev.txt | 1 + packages/service-library/requirements/dev[aiohttp].txt | 1 + packages/service-library/requirements/dev[all].txt | 1 + packages/service-library/requirements/dev[fastapi].txt | 1 + 9 files changed, 9 insertions(+) diff --git a/packages/service-library/requirements/_base.in b/packages/service-library/requirements/_base.in index b806af36443..e0e928cee52 100644 --- a/packages/service-library/requirements/_base.in +++ b/packages/service-library/requirements/_base.in @@ -5,6 +5,7 @@ --constraint ./constraints.txt # intra-repo required dependencies +--requirement ../../../packages/common-library/requirements/_base.in --requirement ../../../packages/models-library/requirements/_base.in --requirement ../../../packages/settings-library/requirements/_base.in diff --git a/packages/service-library/requirements/ci.txt b/packages/service-library/requirements/ci.txt index 4d4dea58c9f..b26845819f4 100644 --- a/packages/service-library/requirements/ci.txt +++ b/packages/service-library/requirements/ci.txt @@ -11,6 +11,7 @@ --requirement _test.txt # installs this repo's packages +simcore-common-library @ ../common-library simcore-models-library @ ../models-library simcore-settings-library @ ../settings-library/ pytest-simcore @ ../pytest-simcore diff --git a/packages/service-library/requirements/ci[aiohttp].txt b/packages/service-library/requirements/ci[aiohttp].txt index 721950755b5..ee41e3b69a6 100644 --- a/packages/service-library/requirements/ci[aiohttp].txt +++ b/packages/service-library/requirements/ci[aiohttp].txt @@ -12,6 +12,7 @@ --requirement _test.txt # installs this repo's packages +simcore-common-library @ ../common-library simcore-models-library @ ../models-library simcore-settings-library @ ../settings-library/ pytest-simcore @ ../pytest-simcore diff --git a/packages/service-library/requirements/ci[all].txt b/packages/service-library/requirements/ci[all].txt index f7610e97111..f43ee95908f 100644 --- a/packages/service-library/requirements/ci[all].txt +++ b/packages/service-library/requirements/ci[all].txt @@ -13,6 +13,7 @@ --requirement _test.txt # installs this repo's packages +simcore-common-library @ ../common-library simcore-models-library @ ../models-library simcore-settings-library @ ../settings-library/ pytest-simcore @ ../pytest-simcore diff --git a/packages/service-library/requirements/ci[fastapi].txt b/packages/service-library/requirements/ci[fastapi].txt index c2c337fd4c0..db051f4ef73 100644 --- a/packages/service-library/requirements/ci[fastapi].txt +++ b/packages/service-library/requirements/ci[fastapi].txt @@ -12,6 +12,7 @@ --requirement _test.txt # installs this repo's packages +simcore-common-library @ ../common-library simcore-models-library @ ../models-library simcore-settings-library @ ../settings-library/ pytest-simcore @ ../pytest-simcore diff --git a/packages/service-library/requirements/dev.txt b/packages/service-library/requirements/dev.txt index b4da8c10382..f814830c46b 100644 --- a/packages/service-library/requirements/dev.txt +++ b/packages/service-library/requirements/dev.txt @@ -12,6 +12,7 @@ --requirement _tools.txt # installs this repo's 
packages +--editable ../common-library --editable ../models-library --editable ../settings-library --editable ../pytest-simcore diff --git a/packages/service-library/requirements/dev[aiohttp].txt b/packages/service-library/requirements/dev[aiohttp].txt index 5e0ae847c64..87748e35d29 100644 --- a/packages/service-library/requirements/dev[aiohttp].txt +++ b/packages/service-library/requirements/dev[aiohttp].txt @@ -13,6 +13,7 @@ --requirement _tools.txt # installs this repo's packages +--editable ../common-library --editable ../models-library/ --editable ../settings-library/ --editable ../pytest-simcore/ diff --git a/packages/service-library/requirements/dev[all].txt b/packages/service-library/requirements/dev[all].txt index b372254b325..8b23b6105c5 100644 --- a/packages/service-library/requirements/dev[all].txt +++ b/packages/service-library/requirements/dev[all].txt @@ -14,6 +14,7 @@ --requirement _tools.txt # installs this repo's packages +--editable ../common-library --editable ../models-library/ --editable ../settings-library/ --editable ../pytest-simcore/ diff --git a/packages/service-library/requirements/dev[fastapi].txt b/packages/service-library/requirements/dev[fastapi].txt index caea1c80fd5..d66370d7904 100644 --- a/packages/service-library/requirements/dev[fastapi].txt +++ b/packages/service-library/requirements/dev[fastapi].txt @@ -13,6 +13,7 @@ --requirement _tools.txt # installs this repo's packages +--editable ../common-library --editable ../models-library/ --editable ../settings-library/ --editable ../pytest-simcore/ From 7bc87155a027c9a3a5dbec65f3fbc2f3dbad8448 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 7 Oct 2024 16:06:22 +0200 Subject: [PATCH 236/280] update requirements --- packages/aws-library/requirements/_base.in | 1 + packages/dask-task-models-library/requirements/_base.in | 1 + packages/dask-task-models-library/requirements/ci.txt | 1 + packages/dask-task-models-library/requirements/dev.txt | 1 + packages/notifications-library/requirements/_base.in | 1 + packages/notifications-library/requirements/ci.txt | 1 + packages/notifications-library/requirements/dev.txt | 1 + packages/service-integration/requirements/_base.in | 1 + packages/service-integration/requirements/ci.txt | 1 + packages/service-integration/requirements/dev.txt | 1 + packages/settings-library/requirements/_base.in | 5 ++++- packages/settings-library/requirements/ci.txt | 1 + packages/settings-library/requirements/dev.txt | 1 + packages/simcore-sdk/requirements/_base.in | 2 ++ packages/simcore-sdk/requirements/ci.txt | 1 + packages/simcore-sdk/requirements/dev.txt | 1 + 16 files changed, 20 insertions(+), 1 deletion(-) diff --git a/packages/aws-library/requirements/_base.in b/packages/aws-library/requirements/_base.in index 628cebcf110..d884806f703 100644 --- a/packages/aws-library/requirements/_base.in +++ b/packages/aws-library/requirements/_base.in @@ -2,6 +2,7 @@ # Specifies third-party dependencies for 'aws-library' # --constraint ../../../requirements/constraints.txt +--requirement ../../../packages/common-library/requirements/_base.in --requirement ../../../packages/models-library/requirements/_base.in --requirement ../../../packages/service-library/requirements/_base.in --requirement ../../../packages/settings-library/requirements/_base.in diff --git a/packages/dask-task-models-library/requirements/_base.in b/packages/dask-task-models-library/requirements/_base.in index 3cdef671c4b..f25da08947b 100644 --- a/packages/dask-task-models-library/requirements/_base.in +++ 
b/packages/dask-task-models-library/requirements/_base.in @@ -2,6 +2,7 @@ # Specifies third-party dependencies for 'dask-task-models-library' # --constraint ../../../requirements/constraints.txt +--requirement ../../../packages/common-library/requirements/_base.in --requirement ../../../packages/models-library/requirements/_base.in --requirement ../../../packages/settings-library/requirements/_base.in diff --git a/packages/dask-task-models-library/requirements/ci.txt b/packages/dask-task-models-library/requirements/ci.txt index 3e0c9323517..c8775e00d91 100644 --- a/packages/dask-task-models-library/requirements/ci.txt +++ b/packages/dask-task-models-library/requirements/ci.txt @@ -12,6 +12,7 @@ # installs this repo's packages pytest-simcore @ ../pytest-simcore +simcore-common-library @ ../common-library simcore-models-library @ ../models-library simcore-settings-library @ ../settings-library/ diff --git a/packages/dask-task-models-library/requirements/dev.txt b/packages/dask-task-models-library/requirements/dev.txt index 0edd20961ac..a9d9555b2e8 100644 --- a/packages/dask-task-models-library/requirements/dev.txt +++ b/packages/dask-task-models-library/requirements/dev.txt @@ -13,6 +13,7 @@ # installs this repo's packages --editable ../pytest-simcore/ +--editable ../common-library/ --editable ../models-library/ --editable ../settings-library/ diff --git a/packages/notifications-library/requirements/_base.in b/packages/notifications-library/requirements/_base.in index 4879a9d6fb6..047005b4a39 100644 --- a/packages/notifications-library/requirements/_base.in +++ b/packages/notifications-library/requirements/_base.in @@ -2,6 +2,7 @@ # Specifies third-party dependencies for 'notifications-library' # --constraint ../../../requirements/constraints.txt +--requirement ../../../packages/common-library/requirements/_base.in --requirement ../../../packages/models-library/requirements/_base.in --requirement ../../../packages/postgres-database/requirements/_base.in --requirement ../../../packages/settings-library/requirements/_base.in diff --git a/packages/notifications-library/requirements/ci.txt b/packages/notifications-library/requirements/ci.txt index 2ac189e1a32..3432db2c649 100644 --- a/packages/notifications-library/requirements/ci.txt +++ b/packages/notifications-library/requirements/ci.txt @@ -11,6 +11,7 @@ --requirement _test.txt # installs this repo's packages +simcore-common-library @ ../common-library/ simcore-models-library @ ../models-library/ simcore-postgres-database @ ../postgres-database/ pytest-simcore @ ../pytest-simcore/ diff --git a/packages/notifications-library/requirements/dev.txt b/packages/notifications-library/requirements/dev.txt index 723de763080..0a010051348 100644 --- a/packages/notifications-library/requirements/dev.txt +++ b/packages/notifications-library/requirements/dev.txt @@ -12,6 +12,7 @@ --requirement _tools.txt # installs this repo's packages +--editable ../common-library/ --editable ../models-library/ --editable ../postgres-database/ --editable ../pytest-simcore/ diff --git a/packages/service-integration/requirements/_base.in b/packages/service-integration/requirements/_base.in index fee8aa856e2..dc7e5dd4a6c 100644 --- a/packages/service-integration/requirements/_base.in +++ b/packages/service-integration/requirements/_base.in @@ -3,6 +3,7 @@ # --constraint ../../../requirements/constraints.txt +--requirement ../../../packages/common-library/requirements/_base.in --requirement ../../../packages/models-library/requirements/_base.in click diff --git 
a/packages/service-integration/requirements/ci.txt b/packages/service-integration/requirements/ci.txt index 9b4e9f16e2b..46a2035bfc7 100644 --- a/packages/service-integration/requirements/ci.txt +++ b/packages/service-integration/requirements/ci.txt @@ -10,6 +10,7 @@ --requirement _base.txt --requirement _test.txt +simcore-common-library @ ../common-library simcore-models-library @ ../models-library pytest-simcore @ ../pytest-simcore diff --git a/packages/service-integration/requirements/dev.txt b/packages/service-integration/requirements/dev.txt index 9e2af0f7124..bbe3d832532 100644 --- a/packages/service-integration/requirements/dev.txt +++ b/packages/service-integration/requirements/dev.txt @@ -11,6 +11,7 @@ --requirement _test.txt --requirement _tools.txt +--editable ../common-library/ --editable ../models-library/ --editable ../pytest-simcore/ diff --git a/packages/settings-library/requirements/_base.in b/packages/settings-library/requirements/_base.in index ec1d848cc85..9672f593a7f 100644 --- a/packages/settings-library/requirements/_base.in +++ b/packages/settings-library/requirements/_base.in @@ -1,8 +1,11 @@ # -# Specifies third-party dependencies for 'models-library' +# Specifies third-party dependencies for 'settings-library' # --constraint ../../../requirements/constraints.txt +# intra-repo required dependencies +--requirement ../../../packages/common-library/requirements/_base.in + pydantic>=1.9 diff --git a/packages/settings-library/requirements/ci.txt b/packages/settings-library/requirements/ci.txt index 9feda17bfaa..d950945b44b 100644 --- a/packages/settings-library/requirements/ci.txt +++ b/packages/settings-library/requirements/ci.txt @@ -12,6 +12,7 @@ # installs this repo's packages pytest-simcore @ ../pytest-simcore +simcore-common-library @ ../common-library/ # current module simcore-settings-library @ . 
diff --git a/packages/settings-library/requirements/dev.txt b/packages/settings-library/requirements/dev.txt index 32d383e9ccc..2168d7f36e1 100644 --- a/packages/settings-library/requirements/dev.txt +++ b/packages/settings-library/requirements/dev.txt @@ -12,6 +12,7 @@ --requirement _tools.txt # installs this repo's packages +--editable ../common-library/ --editable ../pytest-simcore/ # current module diff --git a/packages/simcore-sdk/requirements/_base.in b/packages/simcore-sdk/requirements/_base.in index a07a0b50b01..7999f81151a 100644 --- a/packages/simcore-sdk/requirements/_base.in +++ b/packages/simcore-sdk/requirements/_base.in @@ -7,6 +7,8 @@ --requirement ../../../packages/service-library/requirements/_base.in --requirement ../../../packages/settings-library/requirements/_base.in --requirement ../../../packages/models-library/requirements/_base.in +--requirement ../../../packages/common-library/requirements/_base.in + aiocache aiofiles diff --git a/packages/simcore-sdk/requirements/ci.txt b/packages/simcore-sdk/requirements/ci.txt index 25fcdd73354..afeb04529b3 100644 --- a/packages/simcore-sdk/requirements/ci.txt +++ b/packages/simcore-sdk/requirements/ci.txt @@ -13,6 +13,7 @@ # installs this repo's packages simcore-postgres-database @ ../postgres-database pytest-simcore @ ../pytest-simcore +simcore-common-library @ ../common-library simcore-models-library @ ../models-library simcore-settings-library @ ../settings-library/ simcore-service-library @ ../service-library/ diff --git a/packages/simcore-sdk/requirements/dev.txt b/packages/simcore-sdk/requirements/dev.txt index b67f43d8690..c7e7f45b7ed 100644 --- a/packages/simcore-sdk/requirements/dev.txt +++ b/packages/simcore-sdk/requirements/dev.txt @@ -15,6 +15,7 @@ --editable ../pytest-simcore/ --editable ../postgres-database +--editable ../common-library/ --editable ../models-library/ --editable ../settings-library/ From 18ad6d18f199e50057dea7d11154f1706adbeb66 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 7 Oct 2024 16:24:29 +0200 Subject: [PATCH 237/280] update requirements --- services/agent/requirements/_base.in | 1 + services/agent/requirements/ci.txt | 1 + services/agent/requirements/dev.txt | 1 + services/api-server/requirements/_base.in | 1 + services/api-server/requirements/ci.txt | 1 + services/api-server/requirements/dev.txt | 1 + services/autoscaling/requirements/_base.in | 1 + services/autoscaling/requirements/ci.txt | 1 + services/autoscaling/requirements/dev.txt | 1 + services/catalog/requirements/_base.in | 1 + services/catalog/requirements/ci.txt | 1 + services/catalog/requirements/dev.txt | 1 + services/clusters-keeper/requirements/_base.in | 1 + services/clusters-keeper/requirements/ci.txt | 1 + services/clusters-keeper/requirements/dev.txt | 1 + services/dask-sidecar/requirements/_base.in | 1 + services/dask-sidecar/requirements/ci.txt | 1 + services/dask-sidecar/requirements/dev.txt | 1 + services/datcore-adapter/requirements/_base.in | 1 + services/datcore-adapter/requirements/ci.txt | 1 + services/datcore-adapter/requirements/dev.txt | 1 + services/director-v2/requirements/_base.in | 1 + services/director-v2/requirements/ci.txt | 1 + services/director-v2/requirements/dev.txt | 1 + services/dynamic-scheduler/requirements/_base.in | 1 + services/dynamic-scheduler/requirements/ci.txt | 1 + services/dynamic-scheduler/requirements/dev.txt | 1 + services/dynamic-sidecar/requirements/_base.in | 1 + services/dynamic-sidecar/requirements/ci.txt | 1 + services/dynamic-sidecar/requirements/dev.txt | 1 + 
services/efs-guardian/requirements/_base.in | 1 + services/efs-guardian/requirements/ci.txt | 1 + services/efs-guardian/requirements/dev.txt | 1 + services/invitations/requirements/_base.in | 1 + services/invitations/requirements/ci.txt | 3 ++- services/invitations/requirements/dev.txt | 4 +++- services/payments/requirements/_base.in | 1 + services/payments/requirements/ci.txt | 2 ++ services/payments/requirements/dev.txt | 1 + services/resource-usage-tracker/requirements/_base.in | 1 + services/resource-usage-tracker/requirements/ci.txt | 1 + services/resource-usage-tracker/requirements/dev.txt | 1 + services/storage/requirements/_base.in | 1 + services/storage/requirements/ci.txt | 1 + services/storage/requirements/dev.txt | 1 + 45 files changed, 49 insertions(+), 2 deletions(-) diff --git a/services/agent/requirements/_base.in b/services/agent/requirements/_base.in index 7064a551898..5fc14c5cfe2 100644 --- a/services/agent/requirements/_base.in +++ b/services/agent/requirements/_base.in @@ -6,6 +6,7 @@ --constraint ./constraints.txt # intra-repo required dependencies +--requirement ../../../packages/common-library/requirements/_base.in --requirement ../../../packages/models-library/requirements/_base.in --requirement ../../../packages/settings-library/requirements/_base.in --requirement ../../../packages/service-library/requirements/_fastapi.in diff --git a/services/agent/requirements/ci.txt b/services/agent/requirements/ci.txt index 394361bc0ae..f7cb0dd72a4 100644 --- a/services/agent/requirements/ci.txt +++ b/services/agent/requirements/ci.txt @@ -11,6 +11,7 @@ --requirement _test.txt # installs this repo's packages +simcore-common-library @ ../../packages/common-library simcore-models-library @ ../../packages/models-library pytest-simcore @ ../../packages/pytest-simcore simcore-service-library @ ../../packages/service-library diff --git a/services/agent/requirements/dev.txt b/services/agent/requirements/dev.txt index a20e1ab941d..6f5e8cf7e24 100644 --- a/services/agent/requirements/dev.txt +++ b/services/agent/requirements/dev.txt @@ -12,6 +12,7 @@ --requirement _tools.txt # installs this repo's packages +--editable ../../packages/common-library --editable ../../packages/models-library --editable ../../packages/pytest-simcore --editable ../../packages/service-library diff --git a/services/api-server/requirements/_base.in b/services/api-server/requirements/_base.in index f63ab332f47..30b633a2e10 100644 --- a/services/api-server/requirements/_base.in +++ b/services/api-server/requirements/_base.in @@ -7,6 +7,7 @@ --constraint ./constraints.txt # intra-repo required dependencies +--requirement ../../../packages/common-library/requirements/_base.in --requirement ../../../packages/models-library/requirements/_base.in --requirement ../../../packages/postgres-database/requirements/_base.in --requirement ../../../packages/settings-library/requirements/_base.in diff --git a/services/api-server/requirements/ci.txt b/services/api-server/requirements/ci.txt index 7ea96c9234e..9b554714968 100644 --- a/services/api-server/requirements/ci.txt +++ b/services/api-server/requirements/ci.txt @@ -11,6 +11,7 @@ --requirement _test.txt # installs this repo's packages +simcore-common-library @ ../../packages/models-library simcore-models-library @ ../../packages/models-library simcore-postgres-database @ ../../packages/postgres-database/ pytest-simcore @ ../../packages/pytest-simcore/ diff --git a/services/api-server/requirements/dev.txt b/services/api-server/requirements/dev.txt index 
2de1f4cc316..5afc552d753 100644 --- a/services/api-server/requirements/dev.txt +++ b/services/api-server/requirements/dev.txt @@ -12,6 +12,7 @@ --requirement _tools.txt # installs this repo's packages +--editable ../../packages/common-library --editable ../../packages/models-library --editable ../../packages/postgres-database --editable ../../packages/pytest-simcore/ diff --git a/services/autoscaling/requirements/_base.in b/services/autoscaling/requirements/_base.in index ae362ec2744..231b8944c9d 100644 --- a/services/autoscaling/requirements/_base.in +++ b/services/autoscaling/requirements/_base.in @@ -7,6 +7,7 @@ --constraint ../../../services/dask-sidecar/requirements/_dask-distributed.txt # intra-repo required dependencies +--requirement ../../../packages/common-library/requirements/_base.in --requirement ../../../packages/models-library/requirements/_base.in --requirement ../../../packages/settings-library/requirements/_base.in --requirement ../../../packages/aws-library/requirements/_base.in diff --git a/services/autoscaling/requirements/ci.txt b/services/autoscaling/requirements/ci.txt index 6ae3d42db4f..99c6675a719 100644 --- a/services/autoscaling/requirements/ci.txt +++ b/services/autoscaling/requirements/ci.txt @@ -12,6 +12,7 @@ # installs this repo's packages simcore-aws-library @ ../../packages/aws-library +simcore-common-library @ ../../packages/common-library simcore-dask-task-models-library @ ../../packages/dask-task-models-library simcore-models-library @ ../../packages/models-library pytest-simcore @ ../../packages/pytest-simcore diff --git a/services/autoscaling/requirements/dev.txt b/services/autoscaling/requirements/dev.txt index 432e7ef62e9..ab92769203f 100644 --- a/services/autoscaling/requirements/dev.txt +++ b/services/autoscaling/requirements/dev.txt @@ -13,6 +13,7 @@ # installs this repo's packages --editable ../../packages/aws-library +--editable ../../packages/common-library --editable ../../packages/models-library --editable ../../packages/pytest-simcore --editable ../../packages/service-library[fastapi] diff --git a/services/catalog/requirements/_base.in b/services/catalog/requirements/_base.in index ef6f55597c3..1394dd65e5e 100644 --- a/services/catalog/requirements/_base.in +++ b/services/catalog/requirements/_base.in @@ -6,6 +6,7 @@ --constraint ../../../requirements/constraints.txt --constraint constraints.txt +--requirement ../../../packages/common-library/requirements/_base.in --requirement ../../../packages/models-library/requirements/_base.in --requirement ../../../packages/postgres-database/requirements/_base.in --requirement ../../../packages/settings-library/requirements/_base.in diff --git a/services/catalog/requirements/ci.txt b/services/catalog/requirements/ci.txt index 246c4b290a4..4ebc858a454 100644 --- a/services/catalog/requirements/ci.txt +++ b/services/catalog/requirements/ci.txt @@ -11,6 +11,7 @@ --requirement _test.txt # installs this repo's packages +simcore-common-library @ ../../packages/common-library simcore-models-library @ ../../packages/models-library simcore-postgres-database @ ../../packages/postgres-database pytest-simcore @ ../../packages/pytest-simcore/ diff --git a/services/catalog/requirements/dev.txt b/services/catalog/requirements/dev.txt index dccc4f79f39..c9df003398e 100644 --- a/services/catalog/requirements/dev.txt +++ b/services/catalog/requirements/dev.txt @@ -12,6 +12,7 @@ --requirement _tools.txt # installs this repo's packages +--editable ../../packages/common-library --editable ../../packages/models-library 
--editable ../../packages/postgres-database --editable ../../packages/pytest-simcore/ diff --git a/services/clusters-keeper/requirements/_base.in b/services/clusters-keeper/requirements/_base.in index dc3b222d6db..558d68b67cc 100644 --- a/services/clusters-keeper/requirements/_base.in +++ b/services/clusters-keeper/requirements/_base.in @@ -7,6 +7,7 @@ --constraint ../../../services/dask-sidecar/requirements/_dask-distributed.txt # intra-repo required dependencies +--requirement ../../../packages/common-library/requirements/_base.in --requirement ../../../packages/models-library/requirements/_base.in --requirement ../../../packages/settings-library/requirements/_base.in --requirement ../../../packages/aws-library/requirements/_base.in diff --git a/services/clusters-keeper/requirements/ci.txt b/services/clusters-keeper/requirements/ci.txt index 750d3b3a3d3..12125244536 100644 --- a/services/clusters-keeper/requirements/ci.txt +++ b/services/clusters-keeper/requirements/ci.txt @@ -12,6 +12,7 @@ # installs this repo's packages simcore-aws-library @ ../../packages/aws-library +simcore-common-library @ ../../packages/common-library simcore-models-library @ ../../packages/models-library pytest-simcore @ ../../packages/pytest-simcore simcore-service-library[fastapi] @ ../../packages/service-library diff --git a/services/clusters-keeper/requirements/dev.txt b/services/clusters-keeper/requirements/dev.txt index 5324f4c79f7..faf4378c83d 100644 --- a/services/clusters-keeper/requirements/dev.txt +++ b/services/clusters-keeper/requirements/dev.txt @@ -13,6 +13,7 @@ # installs this repo's packages --editable ../../packages/aws-library +--editable ../../packages/common-library --editable ../../packages/models-library --editable ../../packages/pytest-simcore --editable ../../packages/service-library[fastapi] diff --git a/services/dask-sidecar/requirements/_base.in b/services/dask-sidecar/requirements/_base.in index d2fbda72d85..4ff16158cdf 100644 --- a/services/dask-sidecar/requirements/_base.in +++ b/services/dask-sidecar/requirements/_base.in @@ -11,6 +11,7 @@ # - Added as constraints instead of requirements in order to avoid polluting base.txt # - Will be installed when prod.txt or dev.txt # +--requirement ../../../packages/common-library/requirements/_base.in --requirement ../../../packages/dask-task-models-library/requirements/_base.in --requirement ../../../packages/models-library/requirements/_base.in --requirement ../../../packages/service-library/requirements/_base.in diff --git a/services/dask-sidecar/requirements/ci.txt b/services/dask-sidecar/requirements/ci.txt index 63335c18529..192b4ffcbf0 100644 --- a/services/dask-sidecar/requirements/ci.txt +++ b/services/dask-sidecar/requirements/ci.txt @@ -11,6 +11,7 @@ --requirement _test.txt # installs this repo's packages +simcore-common-library @ ../../packages/common-library/ simcore-dask-task-models-library @ ../../packages/dask-task-models-library/ simcore-models-library @ ../../packages/models-library/ pytest-simcore @ ../../packages/pytest-simcore/ diff --git a/services/dask-sidecar/requirements/dev.txt b/services/dask-sidecar/requirements/dev.txt index 82fbeaefec6..6ad6237135b 100644 --- a/services/dask-sidecar/requirements/dev.txt +++ b/services/dask-sidecar/requirements/dev.txt @@ -12,6 +12,7 @@ --requirement _tools.txt # installs this repo's packages +--editable ../../packages/common-library/ --editable ../../packages/dask-task-models-library/ --editable ../../packages/models-library/ --editable ../../packages/pytest-simcore/ diff --git 
a/services/datcore-adapter/requirements/_base.in b/services/datcore-adapter/requirements/_base.in index de131dd6430..bcedb3cc8ed 100644 --- a/services/datcore-adapter/requirements/_base.in +++ b/services/datcore-adapter/requirements/_base.in @@ -4,6 +4,7 @@ # NOTE: ALL version constraints MUST be commented --constraint ../../../requirements/constraints.txt +--requirement ../../../packages/common-library/requirements/_base.in --requirement ../../../packages/models-library/requirements/_base.in --requirement ../../../packages/settings-library/requirements/_base.in # service-library[fastapi] diff --git a/services/datcore-adapter/requirements/ci.txt b/services/datcore-adapter/requirements/ci.txt index ddd345522ae..f68013921e7 100644 --- a/services/datcore-adapter/requirements/ci.txt +++ b/services/datcore-adapter/requirements/ci.txt @@ -11,6 +11,7 @@ --requirement _test.txt # installs this repo's packages +simcore-common-library @ ../../packages/common-library simcore-models-library @ ../../packages/models-library pytest-simcore @ ../../packages/pytest-simcore simcore-service-library[fastapi] @ ../../packages/service-library diff --git a/services/datcore-adapter/requirements/dev.txt b/services/datcore-adapter/requirements/dev.txt index 73afce79c61..04e2ca59025 100644 --- a/services/datcore-adapter/requirements/dev.txt +++ b/services/datcore-adapter/requirements/dev.txt @@ -12,6 +12,7 @@ --requirement _tools.txt # installs this repo's packages +--editable ../../packages/common-library --editable ../../packages/models-library --editable ../../packages/pytest-simcore --editable ../../packages/service-library[fastapi] diff --git a/services/director-v2/requirements/_base.in b/services/director-v2/requirements/_base.in index 2198739ef70..dc173e2c2b6 100644 --- a/services/director-v2/requirements/_base.in +++ b/services/director-v2/requirements/_base.in @@ -6,6 +6,7 @@ --constraint ./constraints.txt # NOTE: Make sure they are added in setup.install_requires +--requirement ../../../packages/common-library/requirements/_base.in --requirement ../../../packages/dask-task-models-library/requirements/_base.in --requirement ../../../packages/models-library/requirements/_base.in --requirement ../../../packages/postgres-database/requirements/_base.in diff --git a/services/director-v2/requirements/ci.txt b/services/director-v2/requirements/ci.txt index bda1da73082..67de20ae339 100644 --- a/services/director-v2/requirements/ci.txt +++ b/services/director-v2/requirements/ci.txt @@ -12,6 +12,7 @@ --requirement _test.txt # installs this repo's packages +simcore-common-library @ ../../packages/common-library/ simcore-dask-task-models-library @ ../../packages/dask-task-models-library/ simcore-models-library @ ../../packages/models-library simcore-postgres-database @ ../../packages/postgres-database diff --git a/services/director-v2/requirements/dev.txt b/services/director-v2/requirements/dev.txt index 6d932514ae9..f183201fd55 100644 --- a/services/director-v2/requirements/dev.txt +++ b/services/director-v2/requirements/dev.txt @@ -12,6 +12,7 @@ --requirement _tools.txt # installs this repo's packages +--editable ../../packages/common-library/ --editable ../../packages/dask-task-models-library/ --editable ../../packages/models-library --editable ../../packages/postgres-database/ diff --git a/services/dynamic-scheduler/requirements/_base.in b/services/dynamic-scheduler/requirements/_base.in index 74bc0519c82..12ae0b98af0 100644 --- a/services/dynamic-scheduler/requirements/_base.in +++ 
b/services/dynamic-scheduler/requirements/_base.in @@ -6,6 +6,7 @@ --constraint ./constraints.txt # intra-repo required dependencies +--requirement ../../../packages/common-library/requirements/_base.in --requirement ../../../packages/models-library/requirements/_base.in --requirement ../../../packages/postgres-database/requirements/_base.in --requirement ../../../packages/settings-library/requirements/_base.in diff --git a/services/dynamic-scheduler/requirements/ci.txt b/services/dynamic-scheduler/requirements/ci.txt index cfd40e964d9..53b69dc323f 100644 --- a/services/dynamic-scheduler/requirements/ci.txt +++ b/services/dynamic-scheduler/requirements/ci.txt @@ -11,6 +11,7 @@ --requirement _test.txt # installs this repo's packages +simcore-common-library @ ../../packages/common-library simcore-models-library @ ../../packages/models-library simcore-postgres-database @ ../../packages/postgres-database pytest-simcore @ ../../packages/pytest-simcore diff --git a/services/dynamic-scheduler/requirements/dev.txt b/services/dynamic-scheduler/requirements/dev.txt index 89e4eb7519c..60cb7217e53 100644 --- a/services/dynamic-scheduler/requirements/dev.txt +++ b/services/dynamic-scheduler/requirements/dev.txt @@ -12,6 +12,7 @@ --requirement _tools.txt # installs this repo's packages +--editable ../../packages/common-library --editable ../../packages/models-library --editable ../../packages/postgres-database --editable ../../packages/pytest-simcore diff --git a/services/dynamic-sidecar/requirements/_base.in b/services/dynamic-sidecar/requirements/_base.in index 251ca3cedfc..66b47a481b5 100644 --- a/services/dynamic-sidecar/requirements/_base.in +++ b/services/dynamic-sidecar/requirements/_base.in @@ -7,6 +7,7 @@ # NOTE: These input-requirements under packages are tested using latest updates # NOTE: Make sure these packages are added in setup.install_requires +--requirement ../../../packages/common-library/requirements/_base.in --requirement ../../../packages/models-library/requirements/_base.in --requirement ../../../packages/postgres-database/requirements/_base.in # service-library[fastapi] diff --git a/services/dynamic-sidecar/requirements/ci.txt b/services/dynamic-sidecar/requirements/ci.txt index c6040c5bc77..3c2e2adeb92 100644 --- a/services/dynamic-sidecar/requirements/ci.txt +++ b/services/dynamic-sidecar/requirements/ci.txt @@ -11,6 +11,7 @@ --requirement _test.txt # installs this repo's packages +simcore-common-library @ ../../packages/common-library/ simcore-models-library @ ../../packages/models-library/ simcore-postgres-database @ ../../packages/postgres-database/ pytest-simcore @ ../../packages/pytest-simcore/ diff --git a/services/dynamic-sidecar/requirements/dev.txt b/services/dynamic-sidecar/requirements/dev.txt index 2d1c00661ed..ce064f44c52 100644 --- a/services/dynamic-sidecar/requirements/dev.txt +++ b/services/dynamic-sidecar/requirements/dev.txt @@ -12,6 +12,7 @@ --requirement _tools.txt # installs this repo's packages +--editable ../../packages/common-library --editable ../../packages/models-library --editable ../../packages/postgres-database/ --editable ../../packages/pytest-simcore/ diff --git a/services/efs-guardian/requirements/_base.in b/services/efs-guardian/requirements/_base.in index 84e8460fa05..f8db1e50412 100644 --- a/services/efs-guardian/requirements/_base.in +++ b/services/efs-guardian/requirements/_base.in @@ -6,6 +6,7 @@ --constraint ./constraints.txt # intra-repo required dependencies +--requirement ../../../packages/common-library/requirements/_base.in 
--requirement ../../../packages/models-library/requirements/_base.in --requirement ../../../packages/settings-library/requirements/_base.in --requirement ../../../packages/aws-library/requirements/_base.in diff --git a/services/efs-guardian/requirements/ci.txt b/services/efs-guardian/requirements/ci.txt index 85e9fca927f..850a17f5f00 100644 --- a/services/efs-guardian/requirements/ci.txt +++ b/services/efs-guardian/requirements/ci.txt @@ -12,6 +12,7 @@ # installs this repo's packages simcore-aws-library @ ../../packages/aws-library +simcore-common-library @ ../../packages/common-library simcore-models-library @ ../../packages/models-library pytest-simcore @ ../../packages/pytest-simcore simcore-service-library[fastapi] @ ../../packages/service-library diff --git a/services/efs-guardian/requirements/dev.txt b/services/efs-guardian/requirements/dev.txt index 76ea75d980d..bc86a77da96 100644 --- a/services/efs-guardian/requirements/dev.txt +++ b/services/efs-guardian/requirements/dev.txt @@ -13,6 +13,7 @@ # installs this repo's packages --editable ../../packages/aws-library +--editable ../../packages/common-library --editable ../../packages/models-library --editable ../../packages/pytest-simcore --editable ../../packages/service-library[fastapi] diff --git a/services/invitations/requirements/_base.in b/services/invitations/requirements/_base.in index 5ee2336503c..2a775800f93 100644 --- a/services/invitations/requirements/_base.in +++ b/services/invitations/requirements/_base.in @@ -6,6 +6,7 @@ --constraint ./constraints.txt # intra-repo required dependencies +--requirement ../../../packages/common-library/requirements/_base.in --requirement ../../../packages/models-library/requirements/_base.in --requirement ../../../packages/settings-library/requirements/_base.in # service-library[fastapi] diff --git a/services/invitations/requirements/ci.txt b/services/invitations/requirements/ci.txt index bbc14fe4596..4051e96b178 100644 --- a/services/invitations/requirements/ci.txt +++ b/services/invitations/requirements/ci.txt @@ -11,10 +11,11 @@ --requirement _test.txt # installs this repo's packages +simcore-common-library @ ../../packages/common-library +simcore-models-library @ ../../packages/models-library pytest-simcore @ ../../packages/pytest-simcore simcore-service-library[fastapi] @ ../../packages/service-library simcore-settings-library @ ../../packages/settings-library -simcore-models-library @ ../../packages/models-library # installs current package simcore-service-invitations @ . diff --git a/services/invitations/requirements/dev.txt b/services/invitations/requirements/dev.txt index a45c4db8918..1de98a1f08a 100644 --- a/services/invitations/requirements/dev.txt +++ b/services/invitations/requirements/dev.txt @@ -12,10 +12,12 @@ --requirement _tools.txt # installs this repo's packages +--editable ../../packages/common-library +--editable ../../packages/models-library --editable ../../packages/pytest-simcore --editable ../../packages/service-library[fastapi] --editable ../../packages/settings-library ---editable ../../packages/models-library + # installs current package --editable . 
diff --git a/services/payments/requirements/_base.in b/services/payments/requirements/_base.in index da3813cc2bb..6c79c0abca7 100644 --- a/services/payments/requirements/_base.in +++ b/services/payments/requirements/_base.in @@ -6,6 +6,7 @@ --constraint ./constraints.txt # intra-repo required dependencies +--requirement ../../../packages/common-library/requirements/_base.in --requirement ../../../packages/models-library/requirements/_base.in --requirement ../../../packages/postgres-database/requirements/_base.in --requirement ../../../packages/settings-library/requirements/_base.in diff --git a/services/payments/requirements/ci.txt b/services/payments/requirements/ci.txt index fb0d44c3a77..47e283fa955 100644 --- a/services/payments/requirements/ci.txt +++ b/services/payments/requirements/ci.txt @@ -11,10 +11,12 @@ --requirement _test.txt # installs this repo's packages +simcore-common-library @ ../../packages/common-library simcore-models-library @ ../../packages/models-library simcore-postgres-database @ ../../packages/postgres-database pytest-simcore @ ../../packages/pytest-simcore simcore-service-library[fastapi] @ ../../packages/service-library simcore-settings-library @ ../../packages/settings-library + # installs current package simcore-service-payments @ . diff --git a/services/payments/requirements/dev.txt b/services/payments/requirements/dev.txt index b20ade64f27..80aeaf26dbe 100644 --- a/services/payments/requirements/dev.txt +++ b/services/payments/requirements/dev.txt @@ -12,6 +12,7 @@ --requirement _tools.txt # installs this repo's packages +--editable ../../packages/common-library --editable ../../packages/models-library --editable ../../packages/postgres-database --editable ../../packages/pytest-simcore diff --git a/services/resource-usage-tracker/requirements/_base.in b/services/resource-usage-tracker/requirements/_base.in index c71570d1fee..1ed4215b64f 100644 --- a/services/resource-usage-tracker/requirements/_base.in +++ b/services/resource-usage-tracker/requirements/_base.in @@ -7,6 +7,7 @@ # intra-repo required dependencies --requirement ../../../packages/aws-library/requirements/_base.in +--requirement ../../../packages/common-library/requirements/_base.in --requirement ../../../packages/models-library/requirements/_base.in --requirement ../../../packages/settings-library/requirements/_base.in --requirement ../../../packages/postgres-database/requirements/_base.in diff --git a/services/resource-usage-tracker/requirements/ci.txt b/services/resource-usage-tracker/requirements/ci.txt index 9cfb2df141d..2f2df03f1d5 100644 --- a/services/resource-usage-tracker/requirements/ci.txt +++ b/services/resource-usage-tracker/requirements/ci.txt @@ -12,6 +12,7 @@ # installs this repo's packages simcore-aws-library @ ../../packages/aws-library +simcore-common-library @ ../../packages/common-library simcore-models-library @ ../../packages/models-library pytest-simcore @ ../../packages/pytest-simcore simcore-service-library[fastapi] @ ../../packages/service-library diff --git a/services/resource-usage-tracker/requirements/dev.txt b/services/resource-usage-tracker/requirements/dev.txt index 4fc539932c0..253940c1800 100644 --- a/services/resource-usage-tracker/requirements/dev.txt +++ b/services/resource-usage-tracker/requirements/dev.txt @@ -13,6 +13,7 @@ # installs this repo's packages --editable ../../packages/aws-library +--editable ../../packages/common-library --editable ../../packages/models-library --editable ../../packages/pytest-simcore --editable 
../../packages/service-library[fastapi] diff --git a/services/storage/requirements/_base.in b/services/storage/requirements/_base.in index 2db40016724..26ba2b4b835 100644 --- a/services/storage/requirements/_base.in +++ b/services/storage/requirements/_base.in @@ -6,6 +6,7 @@ --requirement ../../../packages/aws-library/requirements/_base.in +--requirement ../../../packages/common-library/requirements/_base.in --requirement ../../../packages/models-library/requirements/_base.in --requirement ../../../packages/postgres-database/requirements/_base.in --requirement ../../../packages/settings-library/requirements/_base.in diff --git a/services/storage/requirements/ci.txt b/services/storage/requirements/ci.txt index f3d92884e0e..25d417f91c7 100644 --- a/services/storage/requirements/ci.txt +++ b/services/storage/requirements/ci.txt @@ -12,6 +12,7 @@ # installs this repo's packages simcore-aws-library @ ../../packages/aws-library/ +simcore-common-library @ ../../packages/common-library/ simcore-models-library @ ../../packages/models-library/ simcore-postgres-database @ ../../packages/postgres-database/ pytest-simcore @ ../../packages/pytest-simcore/ diff --git a/services/storage/requirements/dev.txt b/services/storage/requirements/dev.txt index 0b2b3ae2938..97aefedee51 100644 --- a/services/storage/requirements/dev.txt +++ b/services/storage/requirements/dev.txt @@ -13,6 +13,7 @@ # installs this repo's packages --editable ../../packages/aws-library/ +--editable ../../packages/common-library --editable ../../packages/models-library --editable ../../packages/postgres-database/ --editable ../../packages/pytest-simcore/ From f63a537f2bfda15b20d83ea74f902c066210c27e Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 7 Oct 2024 16:30:27 +0200 Subject: [PATCH 238/280] minor fixes --- packages/common-library/.gitignore | 3 --- packages/common-library/VERSION | 2 +- packages/common-library/setup.py | 2 +- services/api-server/requirements/ci.txt | 2 +- 4 files changed, 3 insertions(+), 6 deletions(-) diff --git a/packages/common-library/.gitignore b/packages/common-library/.gitignore index 98149591573..e69de29bb2d 100644 --- a/packages/common-library/.gitignore +++ b/packages/common-library/.gitignore @@ -1,3 +0,0 @@ - -# erdantic outputs -erd-*.svg diff --git a/packages/common-library/VERSION b/packages/common-library/VERSION index 0ea3a944b39..6e8bf73aa55 100644 --- a/packages/common-library/VERSION +++ b/packages/common-library/VERSION @@ -1 +1 @@ -0.2.0 +0.1.0 diff --git a/packages/common-library/setup.py b/packages/common-library/setup.py index ceef017dfb1..4e381f5bbc2 100644 --- a/packages/common-library/setup.py +++ b/packages/common-library/setup.py @@ -34,7 +34,7 @@ def read_reqs(reqs_path: Path) -> set[str]: "version": Path(CURRENT_DIR / "VERSION").read_text().strip(), "author": "Giancarlo Romeo (giancarloromeo)", "description": "Core service library for simcore pydantic common", - "python_requires": "~=3.10", + "python_requires": "~=3.11", "classifiers": [ "Development Status :: 2 - Pre-Alpha", "Intended Audience :: Developers", diff --git a/services/api-server/requirements/ci.txt b/services/api-server/requirements/ci.txt index 9b554714968..844706d5fb2 100644 --- a/services/api-server/requirements/ci.txt +++ b/services/api-server/requirements/ci.txt @@ -11,7 +11,7 @@ --requirement _test.txt # installs this repo's packages -simcore-common-library @ ../../packages/models-library +simcore-common-library @ ../../packages/common-library simcore-models-library @ ../../packages/models-library 
simcore-postgres-database @ ../../packages/postgres-database/ pytest-simcore @ ../../packages/pytest-simcore/ From 4b93c00361cb13b0994fc5a918d926fcd67f2dea Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 7 Oct 2024 16:51:25 +0200 Subject: [PATCH 239/280] use anyhttpurl legacy --- .../fastapi/long_running_tasks/_client.py | 17 +++++++++-------- .../test_long_running_tasks_context_manager.py | 13 +++++++------ 2 files changed, 16 insertions(+), 14 deletions(-) diff --git a/packages/service-library/src/servicelib/fastapi/long_running_tasks/_client.py b/packages/service-library/src/servicelib/fastapi/long_running_tasks/_client.py index f2ad04c9669..8c2c578c67f 100644 --- a/packages/service-library/src/servicelib/fastapi/long_running_tasks/_client.py +++ b/packages/service-library/src/servicelib/fastapi/long_running_tasks/_client.py @@ -4,9 +4,10 @@ import warnings from typing import Any, Awaitable, Callable, Final +from common_library.pydantic_networks_extension import AnyHttpUrlLegacy from fastapi import FastAPI, status from httpx import AsyncClient, HTTPError -from pydantic import AnyHttpUrl, PositiveFloat, TypeAdapter +from pydantic import PositiveFloat, TypeAdapter from tenacity import RetryCallState from tenacity.asyncio import AsyncRetrying from tenacity.retry import retry_if_exception_type @@ -23,7 +24,7 @@ DEFAULT_HTTP_REQUESTS_TIMEOUT: Final[PositiveFloat] = 15 -_ANY_HTTP_URL_ADAPTER: TypeAdapter[AnyHttpUrl] = TypeAdapter(AnyHttpUrl) +_ANY_HTTP_URL_LEGACY_ADAPTER: TypeAdapter[AnyHttpUrlLegacy] = TypeAdapter(AnyHttpUrlLegacy) logger = logging.getLogger(__name__) @@ -115,7 +116,7 @@ class Client: status, result and/or cancel of a long running task. """ - def __init__(self, app: FastAPI, async_client: AsyncClient, base_url: AnyHttpUrl): + def __init__(self, app: FastAPI, async_client: AsyncClient, base_url: str): """ `app`: used byt the `Client` to recover the `ClientConfiguration` `async_client`: an AsyncClient instance used by `Client` @@ -130,8 +131,8 @@ def _client_configuration(self) -> ClientConfiguration: output: ClientConfiguration = self.app.state.long_running_client_configuration return output - def _get_url(self, path: str) -> AnyHttpUrl: - return _ANY_HTTP_URL_ADAPTER.validate_python( + def _get_url(self, path: str) -> str: + return _ANY_HTTP_URL_LEGACY_ADAPTER.validate_python( f"{self._base_url}{self._client_configuration.router_prefix}{path}", ) @@ -141,7 +142,7 @@ async def get_task_status( ) -> TaskStatus: timeout = timeout or self._client_configuration.default_timeout result = await self._async_client.get( - str(self._get_url(f"/task/{task_id}")), + self._get_url(f"/task/{task_id}"), timeout=timeout, ) if result.status_code != status.HTTP_200_OK: @@ -160,7 +161,7 @@ async def get_task_result( ) -> Any | None: timeout = timeout or self._client_configuration.default_timeout result = await self._async_client.get( - str(self._get_url(f"/task/{task_id}/result")), + self._get_url(f"/task/{task_id}/result"), timeout=timeout, ) if result.status_code != status.HTTP_200_OK: @@ -182,7 +183,7 @@ async def cancel_and_delete_task( ) -> None: timeout = timeout or self._client_configuration.default_timeout result = await self._async_client.delete( - str(self._get_url(f"/task/{task_id}")), + self._get_url(f"/task/{task_id}"), timeout=timeout, ) diff --git a/packages/service-library/tests/fastapi/long_running_tasks/test_long_running_tasks_context_manager.py b/packages/service-library/tests/fastapi/long_running_tasks/test_long_running_tasks_context_manager.py index 
eefd44f3b64..0dc440bf33e 100644 --- a/packages/service-library/tests/fastapi/long_running_tasks/test_long_running_tasks_context_manager.py +++ b/packages/service-library/tests/fastapi/long_running_tasks/test_long_running_tasks_context_manager.py @@ -4,11 +4,12 @@ import asyncio from typing import AsyncIterable, Final +from common_library.pydantic_networks_extension import AnyHttpUrlLegacy import pytest from asgi_lifespan import LifespanManager from fastapi import APIRouter, Depends, FastAPI, status from httpx import AsyncClient -from pydantic import AnyHttpUrl, PositiveFloat, TypeAdapter +from pydantic import PositiveFloat, TypeAdapter from servicelib.fastapi.long_running_tasks._context_manager import _ProgressManager from servicelib.fastapi.long_running_tasks.client import ( Client, @@ -100,7 +101,7 @@ async def test_task_result( assert result.status_code == status.HTTP_200_OK, result.text task_id = result.json() - url = TypeAdapter(AnyHttpUrl).validate_python("http://backgroud.testserver.io") + url = TypeAdapter(AnyHttpUrlLegacy).validate_python("http://backgroud.testserver.io") client = Client(app=bg_task_app, async_client=async_client, base_url=url) async with periodic_task_result( client, @@ -120,7 +121,7 @@ async def test_task_result_times_out( assert result.status_code == status.HTTP_200_OK, result.text task_id = result.json() - url = TypeAdapter(AnyHttpUrl).validate_python("http://backgroud.testserver.io") + url = TypeAdapter(AnyHttpUrlLegacy).validate_python("http://backgroud.testserver.io") client = Client(app=bg_task_app, async_client=async_client, base_url=url) timeout = TASK_SLEEP_INTERVAL / 10 with pytest.raises(TaskClientTimeoutError) as exec_info: @@ -146,7 +147,7 @@ async def test_task_result_task_result_is_an_error( assert result.status_code == status.HTTP_200_OK, result.text task_id = result.json() - url = TypeAdapter(AnyHttpUrl).validate_python("http://backgroud.testserver.io") + url = TypeAdapter(AnyHttpUrlLegacy).validate_python("http://backgroud.testserver.io") client = Client(app=bg_task_app, async_client=async_client, base_url=url) with pytest.raises(TaskClientResultError) as exec_info: async with periodic_task_result( @@ -185,13 +186,13 @@ async def progress_update( assert received == ("", None) for _ in range(repeat): - await progress_updater.update(mock_task_id, percent=ProgressPercent(0.0)) + await progress_updater.update(mock_task_id, percent=TypeAdapter(ProgressPercent).validate_python(0.0)) assert counter == 2 assert received == ("", 0.0) for _ in range(repeat): await progress_updater.update( - mock_task_id, percent=ProgressPercent(1.0), message="done" + mock_task_id, percent=TypeAdapter(ProgressPercent).validate_python(1.0), message="done" ) assert counter == 3 assert received == ("done", 1.0) From 989e76ad76679b826f6262fd4bc798650deca603 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 7 Oct 2024 23:40:43 +0200 Subject: [PATCH 240/280] upgrade reqs --- packages/settings-library/requirements/_base.txt | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/packages/settings-library/requirements/_base.txt b/packages/settings-library/requirements/_base.txt index e9d934b2425..fc4a246dc95 100644 --- a/packages/settings-library/requirements/_base.txt +++ b/packages/settings-library/requirements/_base.txt @@ -6,12 +6,14 @@ markdown-it-py==3.0.0 # via rich mdurl==0.1.2 # via markdown-it-py -pydantic==2.9.1 +pydantic==2.9.2 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/common-library/requirements/_base.in # -r requirements/_base.in # pydantic-settings -pydantic-core==2.23.3 +pydantic-core==2.23.4 # via pydantic pydantic-settings==2.5.2 # via -r requirements/_base.in From fa7072dab557e439f7afab8986e0dad6155c95da Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 7 Oct 2024 23:42:06 +0200 Subject: [PATCH 241/280] update reqs --- packages/settings-library/requirements/ci.txt | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/packages/settings-library/requirements/ci.txt b/packages/settings-library/requirements/ci.txt index 42703a95065..aeacf04c2e5 100644 --- a/packages/settings-library/requirements/ci.txt +++ b/packages/settings-library/requirements/ci.txt @@ -11,9 +11,8 @@ --requirement _test.txt # installs this repo's packages -simcore-common-library @ ../common-library -pytest-simcore @ ../pytest-simcore simcore-common-library @ ../common-library/ +pytest-simcore @ ../pytest-simcore # current module simcore-settings-library @ . From f7d71e433a3ebf61e2893171fba46c5232a15bac Mon Sep 17 00:00:00 2001 From: Andrei Neagu Date: Tue, 8 Oct 2024 07:47:02 +0200 Subject: [PATCH 242/280] remove out of date comment --- .../src/simcore_service_dynamic_scheduler/cli.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/cli.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/cli.py index 79e91df7e30..38fe9747c8b 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/cli.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/cli.py @@ -31,13 +31,6 @@ def echo_dotenv(ctx: typer.Context, *, minimal: bool = True): """ assert ctx # nosec - # NOTE: we normally DO NOT USE `os.environ` to capture env vars but this is a special case - # The idea here is to have a command that can generate a **valid** `.env` file that can be used - # to initialized the app. For that reason we fill required fields of the `ApplicationSettings` with - # "fake" but valid values (e.g. generating a password or adding tags as `replace-with-api-key). - # Nonetheless, if the caller of this CLI has already some **valid** env vars in the environment we want to use them ... - # and that is why we use `os.environ`. 
- settings = ApplicationSettings.create_from_envs() print_as_envfile( From e2252f8c21b5ed52df37be13187624246f289dba Mon Sep 17 00:00:00 2001 From: Andrei Neagu Date: Tue, 8 Oct 2024 08:24:53 +0200 Subject: [PATCH 243/280] moved to common library --- packages/common-library/requirements/_test.in | 1 + .../common-library/requirements/_test.txt | 23 +++++++++- .../common-library/requirements/_tools.txt | 2 +- .../pydantic_networks_extension.py | 9 +++- .../pydantic_settings_validators.py | 20 +++++++++ .../test_pydantic_settings_validators.py | 42 +++++++++++++++++++ .../core/settings.py | 24 ++++------- 7 files changed, 100 insertions(+), 21 deletions(-) create mode 100644 packages/common-library/src/common_library/pydantic_settings_validators.py create mode 100644 packages/common-library/tests/test_pydantic_settings_validators.py diff --git a/packages/common-library/requirements/_test.in b/packages/common-library/requirements/_test.in index 4454d79d36a..1fe37ac0151 100644 --- a/packages/common-library/requirements/_test.in +++ b/packages/common-library/requirements/_test.in @@ -10,6 +10,7 @@ coverage faker +pydantic-settings pytest pytest-asyncio pytest-cov diff --git a/packages/common-library/requirements/_test.txt b/packages/common-library/requirements/_test.txt index 2354abd790d..89b9a19eca6 100644 --- a/packages/common-library/requirements/_test.txt +++ b/packages/common-library/requirements/_test.txt @@ -1,8 +1,12 @@ +annotated-types==0.7.0 + # via + # -c requirements/_base.txt + # pydantic coverage==7.6.1 # via # -r requirements/_test.in # pytest-cov -faker==30.1.0 +faker==30.3.0 # via -r requirements/_test.in icdiff==2.0.7 # via pytest-icdiff @@ -16,6 +20,17 @@ pluggy==1.5.0 # via pytest pprintpp==0.4.0 # via pytest-icdiff +pydantic==2.9.2 + # via + # -c requirements/../../../requirements/constraints.txt + # -c requirements/_base.txt + # pydantic-settings +pydantic-core==2.23.4 + # via + # -c requirements/_base.txt + # pydantic +pydantic-settings==2.5.2 + # via -r requirements/_test.in pytest==8.3.3 # via # -r requirements/_test.in @@ -44,7 +59,9 @@ pytest-sugar==1.0.0 python-dateutil==2.9.0.post0 # via faker python-dotenv==1.0.1 - # via -r requirements/_test.in + # via + # -r requirements/_test.in + # pydantic-settings six==1.16.0 # via python-dateutil termcolor==2.5.0 @@ -53,3 +70,5 @@ typing-extensions==4.12.2 # via # -c requirements/_base.txt # faker + # pydantic + # pydantic-core diff --git a/packages/common-library/requirements/_tools.txt b/packages/common-library/requirements/_tools.txt index a333bb822ae..b5f85d4efcc 100644 --- a/packages/common-library/requirements/_tools.txt +++ b/packages/common-library/requirements/_tools.txt @@ -1,6 +1,6 @@ astroid==3.3.5 # via pylint -black==24.8.0 +black==24.10.0 # via -r requirements/../../../requirements/devenv.txt build==1.2.2.post1 # via pip-tools diff --git a/packages/common-library/src/common_library/pydantic_networks_extension.py b/packages/common-library/src/common_library/pydantic_networks_extension.py index b53a2bfc8ae..75095c39e8d 100644 --- a/packages/common-library/src/common_library/pydantic_networks_extension.py +++ b/packages/common-library/src/common_library/pydantic_networks_extension.py @@ -1,5 +1,12 @@ from typing import Annotated, TypeAlias + from pydantic import AfterValidator, AnyHttpUrl -AnyHttpUrlLegacy: TypeAlias = Annotated[str, AnyHttpUrl, AfterValidator(lambda u: u.rstrip("/"))] +def _strip_last_slash(url: str) -> str: + return url.rstrip("/") + + +AnyHttpUrlLegacy: TypeAlias = Annotated[ + str, AnyHttpUrl, 
AfterValidator(_strip_last_slash) +] diff --git a/packages/common-library/src/common_library/pydantic_settings_validators.py b/packages/common-library/src/common_library/pydantic_settings_validators.py new file mode 100644 index 00000000000..f528e1de9f6 --- /dev/null +++ b/packages/common-library/src/common_library/pydantic_settings_validators.py @@ -0,0 +1,20 @@ +import datetime + +from pydantic import field_validator + + +def _get_float_string_as_seconds( + v: datetime.timedelta | str | float, +) -> datetime.timedelta | float | str: + if isinstance(v, str): + try: + return float(v) + except ValueError: + # returns format like "1:00:00" + return v + return v + + +def validate_timedelta_in_legacy_mode(field: str): + """Transforms a float/int number into a valid datetime as it used to work in the past""" + return field_validator(field, mode="before")(_get_float_string_as_seconds) diff --git a/packages/common-library/tests/test_pydantic_settings_validators.py b/packages/common-library/tests/test_pydantic_settings_validators.py new file mode 100644 index 00000000000..1591b86b937 --- /dev/null +++ b/packages/common-library/tests/test_pydantic_settings_validators.py @@ -0,0 +1,42 @@ +from datetime import timedelta + +import pytest +from common_library.pydantic_settings_validators import ( + validate_timedelta_in_legacy_mode, +) +from faker import Faker +from pydantic import Field +from pydantic_settings import BaseSettings, SettingsConfigDict +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict + + +def test_validate_timedelta_in_legacy_mode( + monkeypatch: pytest.MonkeyPatch, faker: Faker +): + class Settings(BaseSettings): + APP_NAME: str + REQUEST_TIMEOUT: timedelta = Field(default=timedelta(seconds=40)) + + _legacy_parsing_request_timeout = validate_timedelta_in_legacy_mode( + "REQUEST_TIMEOUT" + ) + + model_config = SettingsConfigDict() + + app_name = faker.pystr() + env_vars: dict[str, str] = {"APP_NAME": app_name} + + # without timedelta + setenvs_from_dict(monkeypatch, env_vars) + settings = Settings() + print(settings.model_dump()) + assert settings.APP_NAME == app_name + assert settings.REQUEST_TIMEOUT == timedelta(seconds=40) + + # with timedelta in seconds + env_vars["REQUEST_TIMEOUT"] = "5555" + setenvs_from_dict(monkeypatch, env_vars) + settings = Settings() + print(settings.model_dump()) + assert settings.APP_NAME == app_name + assert settings.REQUEST_TIMEOUT == timedelta(seconds=5555) diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/settings.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/settings.py index df1f8996c52..f8ba6e9a215 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/settings.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/settings.py @@ -1,7 +1,10 @@ import datetime from functools import cached_property -from pydantic import Field, field_validator, parse_obj_as, validator +from common_library.pydantic_settings_validators import ( + validate_timedelta_in_legacy_mode, +) +from pydantic import Field, parse_obj_as, validator from settings_library.application import BaseApplicationSettings from settings_library.basic_types import LogLevel, VersionTag from settings_library.director_v2 import DirectorV2Settings @@ -43,22 +46,9 @@ class _BaseApplicationSettings(BaseApplicationSettings, MixinLoggingSettings): ), ) - # TODO: this should be a common validator put in some common library and not here to allow reuse - # wherever we used 
timedelta this should be in place otherwise it will fail where we overwrite the - # values via env vars - # GCR we need to talk where to place this one - @field_validator("DYNAMIC_SCHEDULER_STOP_SERVICE_TIMEOUT", mode="before") - @classmethod - def interpret_t_as_seconds( - cls, v: datetime.timedelta | str | float - ) -> datetime.timedelta | float | str: - if isinstance(v, str): - try: - return float(v) - except ValueError: - # returns format like "1:00:00" - return v - return v + _legacy_parsing_dynamic_scheduler_stop_service_timeout = ( + validate_timedelta_in_legacy_mode("DYNAMIC_SCHEDULER_STOP_SERVICE_TIMEOUT") + ) @cached_property def LOG_LEVEL(self): # noqa: N802 From a746b809e0aa05a70c43cbec98c1712ad9fe9e05 Mon Sep 17 00:00:00 2001 From: Andrei Neagu Date: Tue, 8 Oct 2024 08:31:04 +0200 Subject: [PATCH 244/280] fixed broken imports after migration to common_library --- .../rabbitmq/rpc_interfaces/dynamic_scheduler/errors.py | 2 +- .../rpc_interfaces/resource_usage_tracker/errors.py | 2 +- .../tests/deferred_tasks/test_deferred_tasks.py | 2 +- packages/settings-library/src/settings_library/base.py | 6 +----- packages/settings-library/src/settings_library/utils_cli.py | 2 +- packages/settings-library/tests/test__pydantic_settings.py | 2 +- .../src/simcore_service_api_server/exceptions/_base.py | 2 +- .../src/simcore_service_autoscaling/core/errors.py | 2 +- .../src/simcore_service_catalog/exceptions/errors.py | 2 +- .../src/simcore_service_dynamic_sidecar/core/errors.py | 2 +- .../modules/user_services_preferences/_errors.py | 2 +- .../src/simcore_service_efs_guardian/exceptions/_base.py | 2 +- services/web/server/src/simcore_service_webserver/errors.py | 2 +- 13 files changed, 13 insertions(+), 17 deletions(-) diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_scheduler/errors.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_scheduler/errors.py index 045ba8638a8..6d7bf2a722c 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_scheduler/errors.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_scheduler/errors.py @@ -1,4 +1,4 @@ -from models_library.errors_classes import OsparcErrorMixin +from common_library.errors_classes import OsparcErrorMixin class BaseDynamicSchedulerRPCError(OsparcErrorMixin, Exception): diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/errors.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/errors.py index 0307dc8c29a..f9c1a24f406 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/errors.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/errors.py @@ -1,4 +1,4 @@ -from models_library.errors_classes import OsparcErrorMixin +from common_library.errors_classes import OsparcErrorMixin class ResourceUsageTrackerRuntimeError(OsparcErrorMixin, RuntimeError): diff --git a/packages/service-library/tests/deferred_tasks/test_deferred_tasks.py b/packages/service-library/tests/deferred_tasks/test_deferred_tasks.py index 44a0a9379ed..9ea22f87ed1 100644 --- a/packages/service-library/tests/deferred_tasks/test_deferred_tasks.py +++ b/packages/service-library/tests/deferred_tasks/test_deferred_tasks.py @@ -16,8 +16,8 @@ import psutil import pytest from aiohttp.test_utils import unused_port +from common_library.serialization import model_dump_with_secrets from 
models_library.utils.json_serialization import json_dumps -from models_library.utils.serialization import model_dump_with_secrets from pydantic import NonNegativeFloat, NonNegativeInt from pytest_mock import MockerFixture from servicelib import redis as servicelib_redis diff --git a/packages/settings-library/src/settings_library/base.py b/packages/settings-library/src/settings_library/base.py index 61fd6353c00..c3f0e103e7a 100644 --- a/packages/settings-library/src/settings_library/base.py +++ b/packages/settings-library/src/settings_library/base.py @@ -2,11 +2,7 @@ from functools import cached_property from typing import Any, Final, get_origin -from common_library.utils.pydantic_fields_extension import ( - get_type, - is_literal, - is_nullable, -) +from common_library.pydantic_fields_extension import get_type, is_literal, is_nullable from pydantic import ValidationInfo, field_validator from pydantic.fields import FieldInfo from pydantic_core import PydanticUndefined, ValidationError diff --git a/packages/settings-library/src/settings_library/utils_cli.py b/packages/settings-library/src/settings_library/utils_cli.py index 4bdaf5c0e9c..2c1ab37116d 100644 --- a/packages/settings-library/src/settings_library/utils_cli.py +++ b/packages/settings-library/src/settings_library/utils_cli.py @@ -7,7 +7,7 @@ import rich import typer -from models_library.utils.serialization import model_dump_with_secrets +from common_library.serialization import model_dump_with_secrets from pydantic import ValidationError from pydantic_settings import BaseSettings diff --git a/packages/settings-library/tests/test__pydantic_settings.py b/packages/settings-library/tests/test__pydantic_settings.py index d42b5ad8586..bdc536387fc 100644 --- a/packages/settings-library/tests/test__pydantic_settings.py +++ b/packages/settings-library/tests/test__pydantic_settings.py @@ -12,10 +12,10 @@ """ +from common_library.pydantic_fields_extension import is_nullable from pydantic import ValidationInfo, field_validator from pydantic.fields import PydanticUndefined from pydantic_settings import BaseSettings -from common_library.utils.pydantic_fields_extension import is_nullable def assert_field_specs( diff --git a/services/api-server/src/simcore_service_api_server/exceptions/_base.py b/services/api-server/src/simcore_service_api_server/exceptions/_base.py index 2e0b2e13c4f..5ea7664ec23 100644 --- a/services/api-server/src/simcore_service_api_server/exceptions/_base.py +++ b/services/api-server/src/simcore_service_api_server/exceptions/_base.py @@ -1,6 +1,6 @@ from typing import Any -from models_library.errors_classes import OsparcErrorMixin +from common_library.errors_classes import OsparcErrorMixin class ApiServerBaseError(OsparcErrorMixin, Exception): diff --git a/services/autoscaling/src/simcore_service_autoscaling/core/errors.py b/services/autoscaling/src/simcore_service_autoscaling/core/errors.py index 1581c54d85e..e4294631224 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/core/errors.py +++ b/services/autoscaling/src/simcore_service_autoscaling/core/errors.py @@ -1,4 +1,4 @@ -from models_library.errors_classes import OsparcErrorMixin +from common_library.errors_classes import OsparcErrorMixin class AutoscalingRuntimeError(OsparcErrorMixin, RuntimeError): diff --git a/services/catalog/src/simcore_service_catalog/exceptions/errors.py b/services/catalog/src/simcore_service_catalog/exceptions/errors.py index c507e657417..8a018cee77a 100644 --- a/services/catalog/src/simcore_service_catalog/exceptions/errors.py +++ 
b/services/catalog/src/simcore_service_catalog/exceptions/errors.py @@ -1,6 +1,6 @@ from typing import Any -from models_library.errors_classes import OsparcErrorMixin +from common_library.errors_classes import OsparcErrorMixin class CatalogBaseError(OsparcErrorMixin, Exception): diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/errors.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/errors.py index 63c6881fd7f..766f44ad652 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/errors.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/errors.py @@ -1,7 +1,7 @@ from typing import Any +from common_library.errors_classes import OsparcErrorMixin from fastapi import status -from models_library.errors_classes import OsparcErrorMixin from models_library.services import RunID diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/user_services_preferences/_errors.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/user_services_preferences/_errors.py index f226502b24d..78e0f28c7d8 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/user_services_preferences/_errors.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/user_services_preferences/_errors.py @@ -1,4 +1,4 @@ -from models_library.errors_classes import OsparcErrorMixin +from common_library.errors_classes import OsparcErrorMixin class BaseServicesPreferencesError(OsparcErrorMixin, Exception): diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/exceptions/_base.py b/services/efs-guardian/src/simcore_service_efs_guardian/exceptions/_base.py index 61a92118c92..87d55b20572 100644 --- a/services/efs-guardian/src/simcore_service_efs_guardian/exceptions/_base.py +++ b/services/efs-guardian/src/simcore_service_efs_guardian/exceptions/_base.py @@ -1,6 +1,6 @@ from typing import Any -from models_library.errors_classes import OsparcErrorMixin +from common_library.errors_classes import OsparcErrorMixin class EfsGuardianBaseError(OsparcErrorMixin, Exception): diff --git a/services/web/server/src/simcore_service_webserver/errors.py b/services/web/server/src/simcore_service_webserver/errors.py index 173699f5888..1bc48eda031 100644 --- a/services/web/server/src/simcore_service_webserver/errors.py +++ b/services/web/server/src/simcore_service_webserver/errors.py @@ -1,6 +1,6 @@ from typing import Any -from models_library.errors_classes import OsparcErrorMixin +from common_library.errors_classes import OsparcErrorMixin class WebServerBaseError(OsparcErrorMixin, Exception): From b8a3f2343501f515a5b76037653862f6dc97713d Mon Sep 17 00:00:00 2001 From: Andrei Neagu Date: Tue, 8 Oct 2024 09:02:48 +0200 Subject: [PATCH 245/280] fixed errors --- packages/common-library/tests/test_errors_classes.py | 3 +-- .../tests/test_pydantic_settings_validators.py | 10 +++++----- 2 files changed, 6 insertions(+), 7 deletions(-) diff --git a/packages/common-library/tests/test_errors_classes.py b/packages/common-library/tests/test_errors_classes.py index ae0ed8c1e3d..63674fbd3b4 100644 --- a/packages/common-library/tests/test_errors_classes.py +++ b/packages/common-library/tests/test_errors_classes.py @@ -38,8 +38,7 @@ class B12(B1, ValueError): def test_error_codes_and_msg_template(): class MyBaseError(OsparcErrorMixin, Exception): - def __init__(self, **ctx: Any) -> None: - super().__init__(**ctx) # Do not forget this for base exceptions! 
+ pass class MyValueError(MyBaseError, ValueError): msg_template = "Wrong value {value}" diff --git a/packages/common-library/tests/test_pydantic_settings_validators.py b/packages/common-library/tests/test_pydantic_settings_validators.py index 1591b86b937..c3c19784dae 100644 --- a/packages/common-library/tests/test_pydantic_settings_validators.py +++ b/packages/common-library/tests/test_pydantic_settings_validators.py @@ -24,19 +24,19 @@ class Settings(BaseSettings): model_config = SettingsConfigDict() app_name = faker.pystr() - env_vars: dict[str, str] = {"APP_NAME": app_name} + env_vars: dict[str, str | bool] = {"APP_NAME": app_name} # without timedelta setenvs_from_dict(monkeypatch, env_vars) settings = Settings() print(settings.model_dump()) - assert settings.APP_NAME == app_name - assert settings.REQUEST_TIMEOUT == timedelta(seconds=40) + assert app_name == settings.APP_NAME + assert timedelta(seconds=40) == settings.REQUEST_TIMEOUT # with timedelta in seconds env_vars["REQUEST_TIMEOUT"] = "5555" setenvs_from_dict(monkeypatch, env_vars) settings = Settings() print(settings.model_dump()) - assert settings.APP_NAME == app_name - assert settings.REQUEST_TIMEOUT == timedelta(seconds=5555) + assert app_name == settings.APP_NAME + assert timedelta(seconds=5555) == settings.REQUEST_TIMEOUT From cd00aec5a01c18fb438f59a6debbeb7c66fb37da Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 8 Oct 2024 09:24:43 +0200 Subject: [PATCH 246/280] ignore assignment --- packages/service-library/src/servicelib/rabbitmq/_errors.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/service-library/src/servicelib/rabbitmq/_errors.py b/packages/service-library/src/servicelib/rabbitmq/_errors.py index 105345efe9b..c105c2b8ff3 100644 --- a/packages/service-library/src/servicelib/rabbitmq/_errors.py +++ b/packages/service-library/src/servicelib/rabbitmq/_errors.py @@ -10,12 +10,12 @@ class BaseRPCError(OsparcErrorMixin, RuntimeError): class RPCNotInitializedError(BaseRPCError): - code = f"{_ERROR_PREFIX}.not_started" + code = f"{_ERROR_PREFIX}.not_started" # type: ignore[assignment] msg_template = "Please check that the RabbitMQ RPC backend was initialized!" class RemoteMethodNotRegisteredError(BaseRPCError): - code = f"{_ERROR_PREFIX}.remote_not_registered" + code = f"{_ERROR_PREFIX}.remote_not_registered" # type: ignore[assignment] msg_template = ( "Could not find a remote method named: '{method_name}'. " "Message from remote server was returned: {incoming_message}. 
" From b74c9f42df677dd98968aa0eb67805de15538809 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 8 Oct 2024 10:14:14 +0200 Subject: [PATCH 247/280] ensure that both the field name and alias work --- .../models-library/src/models_library/projects_nodes_io.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/packages/models-library/src/models_library/projects_nodes_io.py b/packages/models-library/src/models_library/projects_nodes_io.py index 412a6927462..bee43a4b3dc 100644 --- a/packages/models-library/src/models_library/projects_nodes_io.py +++ b/packages/models-library/src/models_library/projects_nodes_io.py @@ -176,6 +176,10 @@ def legacy_enforce_str_to_int(cls, v): return int(v) return v + model_config = ConfigDict( + populate_by_name=True + ) + class SimCoreFileLink(BaseFileLink): """I/O port type to hold a link to a file in simcore S3 storage""" From e0c83b0309954e74a0473ec95de4e8033e8e5973 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 8 Oct 2024 11:30:53 +0200 Subject: [PATCH 248/280] fix tests --- .../tests/fastapi/test_http_client_thin.py | 4 +- .../tests/fastapi/test_openapi.py | 2 +- .../simcore_sdk/node_ports_v2/nodeports_v2.py | 21 +++++-- .../unit/test_node_ports_v2_nodeports_v2.py | 55 ++++++++++++++++++- 4 files changed, 75 insertions(+), 7 deletions(-) diff --git a/packages/service-library/tests/fastapi/test_http_client_thin.py b/packages/service-library/tests/fastapi/test_http_client_thin.py index dfe11467518..7bd96b25eee 100644 --- a/packages/service-library/tests/fastapi/test_http_client_thin.py +++ b/packages/service-library/tests/fastapi/test_http_client_thin.py @@ -3,6 +3,7 @@ import logging from collections.abc import AsyncIterable, Iterable from typing import Final +from common_library.pydantic_networks_extension import AnyHttpUrlLegacy import arrow import pytest @@ -77,7 +78,8 @@ async def thick_client(request_timeout: int) -> AsyncIterable[FakeThickClient]: @pytest.fixture def test_url() -> str: - return str(TypeAdapter(AnyHttpUrl).validate_python("http://missing-host:1111")) + url =TypeAdapter(AnyHttpUrlLegacy).validate_python("http://missing-host:1111") + return f"{url}" async def test_connection_error( diff --git a/packages/service-library/tests/fastapi/test_openapi.py b/packages/service-library/tests/fastapi/test_openapi.py index 0edd438c73a..54f7e017799 100644 --- a/packages/service-library/tests/fastapi/test_openapi.py +++ b/packages/service-library/tests/fastapi/test_openapi.py @@ -44,7 +44,7 @@ def test_exclusive_min_openapi_issue(app: FastAPI): def test_overriding_openapi_method(app: FastAPI): assert not hasattr(app, "_original_openapi") - assert app.openapi.__doc__ is None + #assert app.openapi.__doc__ is None # PC why was this set to check that it is none? 
it's coming from the base fastapi applicaiton and now they provide some docs override_fastapi_openapi_method(app) diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py index edf24e6e297..f7bd21ba2bf 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py @@ -1,8 +1,12 @@ +from asyncio import Task +import traceback import logging from collections.abc import Callable, Coroutine from pathlib import Path from typing import Any +from pydantic_core import InitErrorDetails + from models_library.api_schemas_storage import LinkType from models_library.basic_types import IDStr from models_library.projects import ProjectIDStr @@ -26,6 +30,16 @@ log = logging.getLogger(__name__) +def _format_error(task:Task)-> str: + # pylint:disable=protected-access + assert task._exception #nosec + error_list= traceback.format_exception(type(task._exception), task._exception, task._exception.__traceback__) + return "\n".join(error_list) + +def _get_error_details(task:Task, port_key:str)->InitErrorDetails: + # pylint:disable=protected-access + return InitErrorDetails(type="value_error", loc=(f"{port_key}",), input=_format_error(task), ctx={"error":task._exception}) + class Nodeports(BaseModel): """ Represents a node in a project and all its input/output ports @@ -180,9 +194,8 @@ async def set_multiple( await self.save_to_db_cb(self) # groups all ValidationErrors pre-pending 'port_key' to loc and raises ValidationError - if errors := [ - list(flatten_errors([r], self.__config__, loc=(f"{port_key}",))) + if error_details:= [ + _get_error_details(r, port_key) for port_key, r in zip(port_values.keys(), results) - if isinstance(r, ValidationError) ]: - raise ValidationError(errors, model=type(self)) + raise ValidationError.from_exception_data(title="Multiple port_key errors",line_errors=error_details) diff --git a/packages/simcore-sdk/tests/unit/test_node_ports_v2_nodeports_v2.py b/packages/simcore-sdk/tests/unit/test_node_ports_v2_nodeports_v2.py index 1bc1fcde664..86db4923337 100644 --- a/packages/simcore-sdk/tests/unit/test_node_ports_v2_nodeports_v2.py +++ b/packages/simcore-sdk/tests/unit/test_node_ports_v2_nodeports_v2.py @@ -3,9 +3,11 @@ # pylint:disable=redefined-outer-name # pylint:disable=protected-access +import asyncio from pathlib import Path from typing import Any, Callable +from pydantic import ValidationError import pytest from faker import Faker from pytest_mock import MockFixture @@ -14,7 +16,7 @@ from simcore_sdk.node_ports_v2 import Nodeports, exceptions, ports from simcore_sdk.node_ports_v2.ports_mapping import InputsList, OutputsList from utils_port_v2 import create_valid_port_mapping - +from pytest_mock import MockerFixture @pytest.mark.parametrize( "auto_update", @@ -221,3 +223,54 @@ async def test_node_ports_v2_packages( db_manager = mock_db_manager(default_configuration) node_ports = await ports(user_id, project_id, node_uuid) node_ports = await ports(user_id, project_id, node_uuid, db_manager=db_manager) + + +@pytest.fixture +def mock_port_set(mocker: MockFixture)->None: + async def _always_raise_error(*args, **kwargs): + async def _i_raise_errors(): + raise ValidationError("invalid") + return asyncio.create_task(_i_raise_errors()) + + mocker.patch("simcore_sdk.node_ports_v2.port.Port._set", side_effect=_always_raise_error) + +async def test_node_ports_v2_set_multiple_catch_multiple_failing_set_ports( + 
mock_port_set:None, + mock_db_manager: Callable, + default_configuration: dict[str, Any], + user_id: int, + project_id: str, + node_uuid: str, + faker: Faker, +): + db_manager = mock_db_manager(default_configuration) + + original_inputs = create_valid_port_mapping(InputsList, suffix="original") + original_outputs = create_valid_port_mapping(OutputsList, suffix="original") + + + async def _mock_callback(*args,**kwargs): + pass + + node_ports = Nodeports( + inputs=original_inputs, + outputs=original_outputs, + db_manager=db_manager, + user_id=user_id, + project_id=project_id, + node_uuid=node_uuid, + io_log_redirect_cb=None, + save_to_db_cb=_mock_callback, + node_port_creator_cb=_mock_callback, + auto_update=False, + ) + async with ProgressBarData(num_steps=1, description=faker.pystr()) as progress_bar: + with pytest.raises(ValidationError): + await node_ports.set_multiple( + { + port.key: (port.value, None) + for port in list(original_inputs.values()) + + list(original_outputs.values()) + }, + progress_bar=progress_bar, + ) From bcb907d365963b2ffb7dccf109f0e0fa75b01544 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 8 Oct 2024 11:45:33 +0200 Subject: [PATCH 249/280] fix test --- .../simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py | 1 + .../simcore-sdk/tests/unit/test_node_ports_v2_nodeports_v2.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py index f7bd21ba2bf..d0ef9eb14bf 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py @@ -197,5 +197,6 @@ async def set_multiple( if error_details:= [ _get_error_details(r, port_key) for port_key, r in zip(port_values.keys(), results) + if r is not None ]: raise ValidationError.from_exception_data(title="Multiple port_key errors",line_errors=error_details) diff --git a/packages/simcore-sdk/tests/unit/test_node_ports_v2_nodeports_v2.py b/packages/simcore-sdk/tests/unit/test_node_ports_v2_nodeports_v2.py index 86db4923337..856b4b268b1 100644 --- a/packages/simcore-sdk/tests/unit/test_node_ports_v2_nodeports_v2.py +++ b/packages/simcore-sdk/tests/unit/test_node_ports_v2_nodeports_v2.py @@ -16,7 +16,7 @@ from simcore_sdk.node_ports_v2 import Nodeports, exceptions, ports from simcore_sdk.node_ports_v2.ports_mapping import InputsList, OutputsList from utils_port_v2 import create_valid_port_mapping -from pytest_mock import MockerFixture + @pytest.mark.parametrize( "auto_update", From 9ca9fbc16649f2588e2d77aae25ab8b0de5509b6 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 8 Oct 2024 11:55:16 +0200 Subject: [PATCH 250/280] update reqs --- packages/postgres-database/requirements/_base.in | 1 + packages/postgres-database/requirements/ci.txt | 2 +- packages/postgres-database/requirements/dev.txt | 3 ++- 3 files changed, 4 insertions(+), 2 deletions(-) diff --git a/packages/postgres-database/requirements/_base.in b/packages/postgres-database/requirements/_base.in index 48679f44663..0294edf9114 100644 --- a/packages/postgres-database/requirements/_base.in +++ b/packages/postgres-database/requirements/_base.in @@ -3,6 +3,7 @@ # --constraint ../../../requirements/constraints.txt --constraint ./constraints.txt +--requirement ../../../packages/common-library/requirements/_base.in alembic pydantic diff --git a/packages/postgres-database/requirements/ci.txt 
b/packages/postgres-database/requirements/ci.txt index ffac2669760..c384182e2c8 100644 --- a/packages/postgres-database/requirements/ci.txt +++ b/packages/postgres-database/requirements/ci.txt @@ -12,8 +12,8 @@ --requirement _test.txt # installs this repo's packages +simcore-common-library @ ../common-library/ pytest-simcore @ ../../packages/pytest-simcore/ -simcore-models-library @ ../models-library # current module simcore-postgres-database @ . diff --git a/packages/postgres-database/requirements/dev.txt b/packages/postgres-database/requirements/dev.txt index a05947becdf..69fadfc182a 100644 --- a/packages/postgres-database/requirements/dev.txt +++ b/packages/postgres-database/requirements/dev.txt @@ -13,8 +13,9 @@ --requirement _tools.txt # installs this repo's packages +--editable ../../packages/common-library/ --editable ../../packages/pytest-simcore/ ---editable ../models-library/ + # current module --editable . From 25d27358f2ec7c1734361fe64440b7c85c346267 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 8 Oct 2024 12:19:58 +0200 Subject: [PATCH 251/280] update reqs --- packages/aws-library/requirements/ci.txt | 3 ++- packages/aws-library/requirements/dev.txt | 3 ++- packages/postgres-database/requirements/ci.txt | 2 +- packages/postgres-database/requirements/dev.txt | 4 ++-- 4 files changed, 7 insertions(+), 5 deletions(-) diff --git a/packages/aws-library/requirements/ci.txt b/packages/aws-library/requirements/ci.txt index 7fcd69a010f..f39ca4e3672 100644 --- a/packages/aws-library/requirements/ci.txt +++ b/packages/aws-library/requirements/ci.txt @@ -11,8 +11,9 @@ --requirement _test.txt # installs this repo's packages +simcore-common-library @ ../common-library +simcore-models-library @ ../models-library/ pytest-simcore @ ../pytest-simcore -simcore-models-library @ ../models-library simcore-service-library @ ../service-library/ simcore-settings-library @ ../settings-library/ diff --git a/packages/aws-library/requirements/dev.txt b/packages/aws-library/requirements/dev.txt index f8956725407..34cc644b370 100644 --- a/packages/aws-library/requirements/dev.txt +++ b/packages/aws-library/requirements/dev.txt @@ -12,8 +12,9 @@ --requirement _tools.txt # installs this repo's packages ---editable ../pytest-simcore +--editable ../common-library/ --editable ../models-library/ +--editable ../pytest-simcore/ --editable ../service-library/ --editable ../settings-library/ diff --git a/packages/postgres-database/requirements/ci.txt b/packages/postgres-database/requirements/ci.txt index c384182e2c8..8df1aecf670 100644 --- a/packages/postgres-database/requirements/ci.txt +++ b/packages/postgres-database/requirements/ci.txt @@ -13,7 +13,7 @@ # installs this repo's packages simcore-common-library @ ../common-library/ -pytest-simcore @ ../../packages/pytest-simcore/ +pytest-simcore @ ../pytest-simcore/ # current module simcore-postgres-database @ . 
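As a side note on the nodeports_v2.py change in PATCH 248 above: under pydantic v2, per-port failures are collected and re-raised as a single ValidationError built from InitErrorDetails entries. A standalone sketch of that pattern follows; the helper and variable names are assumptions for illustration, while the pydantic-core calls mirror the ones used in the patch:

# Illustrative sketch only, mirroring the aggregation in nodeports_v2.py (PATCH 248).
from pydantic import ValidationError
from pydantic_core import InitErrorDetails


def raise_aggregated_errors(failures: dict[str, Exception]) -> None:
    # one InitErrorDetails entry per failing key; the built-in "value_error"
    # type expects the original exception under ctx["error"]
    line_errors = [
        InitErrorDetails(
            type="value_error",
            loc=(key,),
            input=str(exc),
            ctx={"error": exc},
        )
        for key, exc in failures.items()
    ]
    if line_errors:
        raise ValidationError.from_exception_data(
            title="Multiple port_key errors",
            line_errors=line_errors,
        )


# Usage (hypothetical): raise_aggregated_errors({"output_1": ValueError("file not found")})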
diff --git a/packages/postgres-database/requirements/dev.txt b/packages/postgres-database/requirements/dev.txt index 69fadfc182a..095f8383b2a 100644 --- a/packages/postgres-database/requirements/dev.txt +++ b/packages/postgres-database/requirements/dev.txt @@ -13,8 +13,8 @@ --requirement _tools.txt # installs this repo's packages ---editable ../../packages/common-library/ ---editable ../../packages/pytest-simcore/ +--editable ../common-library/ +--editable ../pytest-simcore/ # current module From 0f204cdcec375b3a375912b7ffab1b2a7c22497c Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 8 Oct 2024 12:48:55 +0200 Subject: [PATCH 252/280] fix httpurl field --- .../src/common_library/pydantic_networks_extension.py | 5 ++++- .../payments/src/simcore_service_payments/core/settings.py | 6 +++--- .../simcore_service_payments/services/payments_gateway.py | 2 +- 3 files changed, 8 insertions(+), 5 deletions(-) diff --git a/packages/common-library/src/common_library/pydantic_networks_extension.py b/packages/common-library/src/common_library/pydantic_networks_extension.py index 75095c39e8d..8d269f3fbb3 100644 --- a/packages/common-library/src/common_library/pydantic_networks_extension.py +++ b/packages/common-library/src/common_library/pydantic_networks_extension.py @@ -1,6 +1,6 @@ from typing import Annotated, TypeAlias -from pydantic import AfterValidator, AnyHttpUrl +from pydantic import AfterValidator, AnyHttpUrl, HttpUrl def _strip_last_slash(url: str) -> str: @@ -10,3 +10,6 @@ def _strip_last_slash(url: str) -> str: AnyHttpUrlLegacy: TypeAlias = Annotated[ str, AnyHttpUrl, AfterValidator(_strip_last_slash) ] + + +HttpUrlLegacy: TypeAlias = Annotated[str, HttpUrl, AfterValidator(_strip_last_slash)] diff --git a/services/payments/src/simcore_service_payments/core/settings.py b/services/payments/src/simcore_service_payments/core/settings.py index 78759c3cc7f..ba2bd923954 100644 --- a/services/payments/src/simcore_service_payments/core/settings.py +++ b/services/payments/src/simcore_service_payments/core/settings.py @@ -1,11 +1,11 @@ from functools import cached_property +from common_library.pydantic_networks_extension import HttpUrlLegacy from models_library.basic_types import NonNegativeDecimal from pydantic import ( AliasChoices, EmailStr, Field, - HttpUrl, PositiveFloat, SecretStr, TypeAdapter, @@ -65,7 +65,7 @@ class ApplicationSettings(_BaseApplicationSettings): These settings includes extra configuration for the http-API """ - PAYMENTS_GATEWAY_URL: HttpUrl = Field( + PAYMENTS_GATEWAY_URL: HttpUrlLegacy = Field( ..., description="Base url to the payment gateway" ) @@ -125,7 +125,7 @@ class ApplicationSettings(_BaseApplicationSettings): json_schema_extra={"auto_default_from_env": True}, ) - PAYMENTS_STRIPE_URL: HttpUrl = Field( + PAYMENTS_STRIPE_URL: HttpUrlLegacy = Field( ..., description="Base url to the payment Stripe" ) PAYMENTS_STRIPE_API_SECRET: SecretStr = Field( diff --git a/services/payments/src/simcore_service_payments/services/payments_gateway.py b/services/payments/src/simcore_service_payments/services/payments_gateway.py index d2789c67221..8410dcc15db 100644 --- a/services/payments/src/simcore_service_payments/services/payments_gateway.py +++ b/services/payments/src/simcore_service_payments/services/payments_gateway.py @@ -12,11 +12,11 @@ from contextlib import suppress import httpx +from common_library.errors_classes import OsparcErrorMixin from fastapi import FastAPI from fastapi.encoders import jsonable_encoder from httpx import URL, HTTPStatusError from 
models_library.api_schemas_webserver.wallets import PaymentID, PaymentMethodID -from models_library.error_classes import OsparcErrorMixin from pydantic import TypeAdapter, ValidationError from servicelib.fastapi.app_state import SingletonInAppStateMixin from servicelib.fastapi.http_client import ( From 8981b412e7c3768206abaa7b0499f0163c79053d Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 8 Oct 2024 13:10:31 +0200 Subject: [PATCH 253/280] upgrade reqs --- services/web/server/requirements/_base.txt | 200 +++++++++++++++++++- services/web/server/requirements/_test.txt | 7 +- services/web/server/requirements/_tools.txt | 8 - 3 files changed, 199 insertions(+), 16 deletions(-) diff --git a/services/web/server/requirements/_base.txt b/services/web/server/requirements/_base.txt index ab574d6c39f..fd0073f3b01 100644 --- a/services/web/server/requirements/_base.txt +++ b/services/web/server/requirements/_base.txt @@ -24,17 +24,30 @@ aiofiles==0.8.0 # -r requirements/_base.in aiohttp==3.8.5 # via + # -c requirements/../../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../../requirements/constraints.txt @@ -72,6 +85,8 @@ alembic==1.8.1 # via # -r requirements/../../../../packages/postgres-database/requirements/_base.in # -r requirements/../../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in +annotated-types==0.7.0 + # via pydantic anyio==4.3.0 # via # fast-depends @@ -88,7 +103,6 @@ async-timeout==4.0.3 # via # aiohttp # aiopg - # redis asyncpg==0.27.0 # via # -r requirements/_base.in @@ -105,17 +119,30 @@ captcha==0.5.0 # via -r requirements/_base.in certifi==2023.7.22 # via + # -c requirements/../../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../../requirements/constraints.txt @@ -130,17 +157,30 @@ click==8.1.3 # via typer cryptography==41.0.7 # via + # -c requirements/../../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../../requirements/constraints.txt @@ -148,7 +188,7 @@ cryptography==41.0.7 # aiohttp-session dnspython==2.2.1 # via email-validator -email-validator==1.2.1 +email-validator==2.2.0 # via pydantic et-xmlfile==1.1.0 # via openpyxl @@ -181,17 +221,30 @@ jinja-app-loader==1.0.2 # via -r requirements/_base.in jinja2==3.1.2 # via + # -c requirements/../../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../../requirements/constraints.txt @@ -214,17 +267,30 @@ lazy-object-proxy==1.7.1 # via openapi-core mako==1.2.2 # via + # -c requirements/../../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../../requirements/constraints.txt @@ -253,17 +319,30 @@ openpyxl==3.0.9 # via -r requirements/_base.in orjson==3.10.0 # via + # -c requirements/../../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../../requirements/constraints.txt @@ -296,39 +375,84 @@ pycountry==23.12.11 # via -r requirements/_base.in pycparser==2.21 # via cffi -pydantic==1.10.17 +pydantic==2.9.2 # via + # -c requirements/../../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -c requirements/../../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../../requirements/constraints.txt # -c requirements/./constraints.txt + # -r requirements/../../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../../packages/models-library/requirements/_base.in + # -r requirements/../../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../../packages/postgres-database/requirements/_base.in + # -r requirements/../../../../packages/service-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../../packages/service-library/requirements/_base.in + # -r requirements/../../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in + # -r 
requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../../packages/simcore-sdk/requirements/_base.in # -r requirements/_base.in # fast-depends + # pydantic-extra-types + # pydantic-settings +pydantic-core==2.23.4 + # via pydantic +pydantic-extra-types==2.9.0 + # via + # -r requirements/../../../../packages/models-library/requirements/_base.in + # -r requirements/../../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in +pydantic-settings==2.5.2 + # via + # -r requirements/../../../../packages/models-library/requirements/_base.in + # -r requirements/../../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/_base.in pygments==2.15.1 # via rich pyinstrument==4.6.1 @@ -343,6 +467,8 @@ python-dateutil==2.8.2 # via # arrow # faker +python-dotenv==1.0.1 + # via pydantic-settings python-engineio==4.3.4 # via python-socketio python-magic==0.4.25 @@ -353,17 +479,30 @@ pytz==2022.1 # via twilio pyyaml==6.0.1 # via + # -c requirements/../../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../../requirements/constraints.txt @@ -373,17 +512,30 @@ pyyaml==6.0.1 # openapi-spec-validator redis==5.0.4 # via + # -c requirements/../../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../../requirements/constraints.txt @@ -418,17 +570,30 @@ sniffio==1.3.1 # via anyio sqlalchemy==1.4.47 # via + # -c requirements/../../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../../requirements/constraints.txt @@ -468,37 +633,64 @@ typing-extensions==4.12.0 # aiodocker # faststream # pydantic + # pydantic-core # typer ujson==5.5.0 # via + # -c requirements/../../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../../requirements/constraints.txt # aiohttp-swagger urllib3==1.26.11 # via + # -c 
requirements/../../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../../requirements/constraints.txt diff --git a/services/web/server/requirements/_test.txt b/services/web/server/requirements/_test.txt index 28185fac32f..5ded209534a 100644 --- a/services/web/server/requirements/_test.txt +++ b/services/web/server/requirements/_test.txt @@ -18,7 +18,6 @@ async-timeout==4.0.3 # via # -c requirements/_base.txt # aiohttp - # redis asyncpg==0.27.0 # via # -c requirements/_base.txt @@ -174,7 +173,9 @@ python-dateutil==2.8.2 # -c requirements/_base.txt # faker python-dotenv==1.0.1 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in pyyaml==6.0.1 # via # -c requirements/../../../../requirements/constraints.txt @@ -217,8 +218,6 @@ tenacity==8.5.0 # -r requirements/_test.in termcolor==2.4.0 # via pytest-sugar -tomli==2.0.1 - # via coverage types-aiofiles==24.1.0.20240626 # via -r requirements/_test.in types-jsonschema==4.23.0.20240813 diff --git a/services/web/server/requirements/_tools.txt b/services/web/server/requirements/_tools.txt index 4d6682cf5c2..010723ce3ae 100644 --- a/services/web/server/requirements/_tools.txt +++ b/services/web/server/requirements/_tools.txt @@ -81,14 +81,6 @@ setuptools==69.1.1 # -c requirements/_base.txt # -c requirements/_test.txt # pip-tools -tomli==2.0.1 - # via - # -c requirements/_test.txt - # black - # build - # mypy - # pip-tools - # pylint tomlkit==0.13.2 # via pylint types-cachetools==5.5.0.20240820 From 50c8af446e48b179fc00f2d4daa9aa37d8cdb9b9 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 8 Oct 2024 13:18:53 +0200 Subject: [PATCH 254/280] upgrade reqs --- .../requirements/requirements.txt | 28 ++- tests/public-api/requirements/_base.txt | 20 ++- tests/public-api/requirements/_test.txt | 10 -- tests/public-api/requirements/_tools.txt | 11 -- tests/swarm-deploy/requirements/_test.txt | 170 ++++++++++++++++-- tests/swarm-deploy/requirements/_tools.txt | 10 -- 6 files changed, 195 insertions(+), 54 deletions(-) diff --git a/tests/environment-setup/requirements/requirements.txt b/tests/environment-setup/requirements/requirements.txt index d1641743383..1ae402aba4e 100644 --- a/tests/environment-setup/requirements/requirements.txt +++ b/tests/environment-setup/requirements/requirements.txt @@ -1,5 +1,5 @@ -exceptiongroup==1.2.2 - # via pytest +annotated-types==0.7.0 + # via pydantic iniconfig==2.0.0 # via pytest packaging==24.1 @@ -8,16 +8,20 @@ packaging==24.1 # pytest-sugar pluggy==1.5.0 # via pytest -pydantic==1.10.17 +pydantic==2.9.2 # via + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/settings-library/requirements/_base.in # -c requirements/../../../requirements/constraints.txt # -r requirements/requirements.in +pydantic-core==2.23.4 + # via pydantic pytest==8.3.2 # via # -r requirements/requirements.in @@ -26,9 +30,13 @@ pytest==8.3.2 # pytest-sugar pytest-asyncio==0.23.8 # via + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/requirements.in @@ -40,15 +48,19 @@ pytest-sugar==1.0.0 # via -r requirements/requirements.in pyyaml==6.0.2 # via + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/requirements.in termcolor==2.4.0 # via pytest-sugar -tomli==2.0.1 - # via pytest typing-extensions==4.12.2 - # via pydantic + # via + # pydantic + # pydantic-core diff --git a/tests/public-api/requirements/_base.txt b/tests/public-api/requirements/_base.txt index fe4fab279b4..3e905e55ad4 100644 --- a/tests/public-api/requirements/_base.txt +++ b/tests/public-api/requirements/_base.txt @@ -1,21 +1,23 @@ +annotated-types==0.7.0 + # via pydantic anyio==4.4.0 # via httpx certifi==2024.7.4 # via + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # httpcore # httpx # osparc-client click==8.1.7 # via typer -exceptiongroup==1.2.2 - # via anyio h11==0.14.0 # via httpcore httpcore==1.0.5 # via httpx httpx==0.27.0 # via + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # osparc idna==3.7 @@ -34,14 +36,23 @@ osparc-client==0.6.6 # via osparc packaging==24.1 # via osparc -pydantic==1.10.17 +pydantic==2.9.2 # via + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in + # pydantic-settings +pydantic-core==2.23.4 + # via pydantic +pydantic-settings==2.5.2 + # via -r requirements/../../../packages/settings-library/requirements/_base.in pygments==2.18.0 # via rich python-dateutil==2.9.0.post0 # via osparc-client +python-dotenv==1.0.1 + # via pydantic-settings rich==13.7.1 # via # -r requirements/../../../packages/settings-library/requirements/_base.in @@ -64,10 +75,11 @@ typer==0.12.4 # via -r requirements/../../../packages/settings-library/requirements/_base.in typing-extensions==4.12.2 # via - # anyio # pydantic + # pydantic-core # typer urllib3==2.2.2 # via + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # osparc-client diff --git a/tests/public-api/requirements/_test.txt b/tests/public-api/requirements/_test.txt index 757e13455fd..d0498fe06e2 
100644 --- a/tests/public-api/requirements/_test.txt +++ b/tests/public-api/requirements/_test.txt @@ -11,8 +11,6 @@ aiosignal==1.3.1 # via aiohttp anyio==4.4.0 # via httpx -async-timeout==4.0.3 - # via aiohttp attrs==24.2.0 # via # aiohttp @@ -28,10 +26,6 @@ charset-normalizer==3.3.2 # via requests docker==7.1.0 # via -r requirements/_test.in -exceptiongroup==1.2.2 - # via - # anyio - # pytest faker==27.0.0 # via -r requirements/_test.in frozenlist==1.4.1 @@ -100,10 +94,6 @@ sniffio==1.3.1 # httpx tenacity==9.0.0 # via -r requirements/_test.in -tomli==2.0.1 - # via pytest -typing-extensions==4.12.2 - # via anyio urllib3==2.2.2 # via # -c requirements/../../../requirements/constraints.txt diff --git a/tests/public-api/requirements/_tools.txt b/tests/public-api/requirements/_tools.txt index 7f2f1b178b4..5f175f88fa6 100644 --- a/tests/public-api/requirements/_tools.txt +++ b/tests/public-api/requirements/_tools.txt @@ -69,22 +69,11 @@ ruff==0.6.1 # via -r requirements/../../../requirements/devenv.txt setuptools==73.0.1 # via pip-tools -tomli==2.0.1 - # via - # -c requirements/_test.txt - # black - # build - # mypy - # pip-tools - # pylint tomlkit==0.13.2 # via pylint typing-extensions==4.12.2 # via # -c requirements/_base.txt - # -c requirements/_test.txt - # astroid - # black # mypy virtualenv==20.26.3 # via pre-commit diff --git a/tests/swarm-deploy/requirements/_test.txt b/tests/swarm-deploy/requirements/_test.txt index 68e7f7e9a96..a85e38aa920 100644 --- a/tests/swarm-deploy/requirements/_test.txt +++ b/tests/swarm-deploy/requirements/_test.txt @@ -23,16 +23,28 @@ aiohappyeyeballs==2.4.0 # via aiohttp aiohttp==3.10.5 # via + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt @@ -49,6 +61,8 @@ alembic==1.13.2 # -r requirements/../../../packages/postgres-database/requirements/_migration.txt # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in # -r requirements/_test.in +annotated-types==0.7.0 + # via pydantic anyio==4.4.0 # via # fast-depends @@ -65,10 +79,8 @@ arrow==1.3.0 # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in async-timeout==4.0.3 # via - # aiohttp # aiopg # asyncpg - # redis asyncpg==0.29.0 # via sqlalchemy attrs==24.2.0 @@ -78,16 +90,28 @@ attrs==24.2.0 # referencing certifi==2024.7.4 # via + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt @@ -110,10 +134,6 @@ docker==7.1.0 # -r requirements/_test.in email-validator==2.2.0 # via pydantic -exceptiongroup==1.2.2 - # via - # anyio - # pytest 
fast-depends==2.4.8 # via faststream faststream==0.5.18 @@ -152,16 +172,28 @@ jsonschema-specifications==2023.7.1 # via jsonschema mako==1.3.5 # via + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt @@ -181,16 +213,28 @@ multidict==6.0.5 # yarl orjson==3.10.7 # via + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt @@ -213,34 +257,78 @@ psycopg2-binary==2.9.9 # via # aiopg # sqlalchemy -pydantic==1.10.17 +pydantic==2.9.2 # via + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/_base.in + # -r 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/_base.in # fast-depends + # pydantic-extra-types + # pydantic-settings +pydantic-core==2.23.4 + # via pydantic +pydantic-extra-types==2.9.0 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in +pydantic-settings==2.5.2 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/_base.in pygments==2.18.0 # via rich pyinstrument==4.7.2 @@ -256,16 +344,28 @@ pytest==8.3.2 # pytest-sugar pytest-asyncio==0.23.8 # via + # -c 
requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt @@ -281,19 +381,33 @@ pytest-sugar==1.0.0 python-dateutil==2.9.0.post0 # via arrow python-dotenv==1.0.1 - # via -r requirements/_test.in + # via + # -r requirements/_test.in + # pydantic-settings pyyaml==6.0.2 # via + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt @@ -302,16 +416,28 @@ pyyaml==6.0.2 # -r requirements/_test.in redis==5.0.8 # via + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt @@ -346,16 +472,28 @@ sniffio==1.3.1 # via anyio sqlalchemy==1.4.53 # via + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt @@ -372,8 +510,6 @@ tenacity==9.0.0 # -r requirements/_test.in termcolor==2.4.0 # via pytest-sugar -tomli==2.0.1 - # via pytest toolz==0.12.1 # via # -r requirements/../../../packages/service-library/requirements/_base.in @@ -397,25 +533,37 @@ typing-extensions==4.12.2 # -r requirements/../../../packages/postgres-database/requirements/_migration.txt # aiodebug # alembic - # anyio # faststream # flexcache # flexparser # pint # pydantic + # pydantic-core # typer urllib3==2.2.2 # via + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt diff --git a/tests/swarm-deploy/requirements/_tools.txt b/tests/swarm-deploy/requirements/_tools.txt index 24faa87728a..14b66aa5089 100644 --- 
a/tests/swarm-deploy/requirements/_tools.txt +++ b/tests/swarm-deploy/requirements/_tools.txt @@ -69,21 +69,11 @@ ruff==0.6.1 # via -r requirements/../../../requirements/devenv.txt setuptools==73.0.1 # via pip-tools -tomli==2.0.1 - # via - # -c requirements/_test.txt - # black - # build - # mypy - # pip-tools - # pylint tomlkit==0.13.2 # via pylint typing-extensions==4.12.2 # via # -c requirements/_test.txt - # astroid - # black # mypy virtualenv==20.26.3 # via pre-commit From 6fe15446dffd310482bb71b2a545807ff1a48e41 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 8 Oct 2024 13:48:39 +0200 Subject: [PATCH 255/280] fix mypy --- .../models/schemas/jobs.py | 2 +- .../services/director_v2.py | 2 +- .../services/solver_job_models_converters.py | 4 ++-- .../simcore_service_api_server/services/storage.py | 14 +++++++------- 4 files changed, 11 insertions(+), 11 deletions(-) diff --git a/services/api-server/src/simcore_service_api_server/models/schemas/jobs.py b/services/api-server/src/simcore_service_api_server/models/schemas/jobs.py index 93c2982e136..f3c2c16672b 100644 --- a/services/api-server/src/simcore_service_api_server/models/schemas/jobs.py +++ b/services/api-server/src/simcore_service_api_server/models/schemas/jobs.py @@ -223,7 +223,7 @@ def create_now( @classmethod def create_solver_job(cls, *, solver: Solver, inputs: JobInputs): return Job.create_now( - parent_name=solver.name, # type: ignore + parent_name=solver.name, inputs_checksum=inputs.compute_checksum(), ) diff --git a/services/api-server/src/simcore_service_api_server/services/director_v2.py b/services/api-server/src/simcore_service_api_server/services/director_v2.py index f2a9eca3018..128510ebd14 100644 --- a/services/api-server/src/simcore_service_api_server/services/director_v2.py +++ b/services/api-server/src/simcore_service_api_server/services/director_v2.py @@ -54,7 +54,7 @@ def guess_progress(self) -> PercentageInt: json_schema_extra={ "examples": [ { - **ComputationTask.model_config["json_schema_extra"]["examples"][0], + **ComputationTask.model_config["json_schema_extra"]["examples"][0], # type: ignore "url": "https://link-to-stop-computation", } ] diff --git a/services/api-server/src/simcore_service_api_server/services/solver_job_models_converters.py b/services/api-server/src/simcore_service_api_server/services/solver_job_models_converters.py index 74a74940e40..ff57e93d4a7 100644 --- a/services/api-server/src/simcore_service_api_server/services/solver_job_models_converters.py +++ b/services/api-server/src/simcore_service_api_server/services/solver_job_models_converters.py @@ -69,7 +69,7 @@ def create_node_inputs_from_job_inputs( # FIXME: ensure this aligns with storage policy node_inputs[KeyIDStr(name)] = SimCoreFileLink( store=0, - path=f"api/{value.id}/{value.filename}", # type: ignore[arg-type] + path=f"api/{value.id}/{value.filename}", label=value.filename, eTag=value.e_tag, ) @@ -90,7 +90,7 @@ def create_job_inputs_from_node_inputs(inputs: dict[InputID, InputTypes]) -> Job for name, value in inputs.items(): assert TypeAdapter(InputID).validate_python(name) == name # nosec assert ( # nosec - TypeAdapter(InputTypes).validate_python(value) == value # type: ignore[arg-type] + TypeAdapter(InputTypes).validate_python(value) == value ) if isinstance(value, SimCoreFileLink): diff --git a/services/api-server/src/simcore_service_api_server/services/storage.py b/services/api-server/src/simcore_service_api_server/services/storage.py index 13920d8a931..160f3fdae06 100644 --- 
a/services/api-server/src/simcore_service_api_server/services/storage.py +++ b/services/api-server/src/simcore_service_api_server/services/storage.py @@ -72,9 +72,9 @@ async def list_files( ) response.raise_for_status() - files_metadata = Envelope[FileMetaDataArray].parse_raw(response.text).data + files_metadata = Envelope[FileMetaDataArray].model_validate_json(response.text).data files: list[StorageFileMetaData] = ( - [] if files_metadata is None else files_metadata.__root__ + [] if files_metadata is None else files_metadata.root ) return files @@ -107,9 +107,9 @@ async def search_owned_files( ) response.raise_for_status() - files_metadata = Envelope[FileMetaDataArray].parse_raw(response.text).data + files_metadata = Envelope[FileMetaDataArray].model_validate_json(response.text).data files: list[StorageFileMetaData] = ( - [] if files_metadata is None else files_metadata.__root__ + [] if files_metadata is None else files_metadata.root ) assert len(files) <= limit if limit else True # nosec return files @@ -127,7 +127,7 @@ async def get_download_link( response.raise_for_status() presigned_link: PresignedLink | None = ( - Envelope[PresignedLink].parse_raw(response.text).data + Envelope[PresignedLink].model_validate_json(response.text).data ) assert presigned_link is not None link: AnyUrl = presigned_link.link @@ -154,7 +154,7 @@ async def get_upload_links( ) response.raise_for_status() - enveloped_data = Envelope[FileUploadSchema].parse_raw(response.text) + enveloped_data = Envelope[FileUploadSchema].model_validate_json(response.text) assert enveloped_data.data # nosec return enveloped_data.data @@ -200,7 +200,7 @@ async def create_soft_link( ) response.raise_for_status() - stored_file_meta = Envelope[StorageFileMetaData].parse_raw(response.text).data + stored_file_meta = Envelope[StorageFileMetaData].model_validate_json(response.text).data assert stored_file_meta is not None file_meta: File = to_file_api_model(stored_file_meta) return file_meta From b29329a91b46325275c7f9da1dd135900883529d Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 8 Oct 2024 13:54:41 +0200 Subject: [PATCH 256/280] fix reqs --- services/web/server/requirements/_base.in | 1 + services/web/server/requirements/ci.txt | 1 + services/web/server/requirements/dev.txt | 1 + services/web/server/requirements/prod.txt | 1 + 4 files changed, 4 insertions(+) diff --git a/services/web/server/requirements/_base.in b/services/web/server/requirements/_base.in index b0a765fd8ea..e412ee73d08 100644 --- a/services/web/server/requirements/_base.in +++ b/services/web/server/requirements/_base.in @@ -9,6 +9,7 @@ # - Added as constraints instead of requirements in order to avoid polluting base.txt # - Will be installed when prod.txt or dev.txt # +--requirement ../../../../packages/common-library/requirements/_base.in --requirement ../../../../packages/models-library/requirements/_base.in --requirement ../../../../packages/postgres-database/requirements/_base.in --requirement ../../../../packages/settings-library/requirements/_base.in diff --git a/services/web/server/requirements/ci.txt b/services/web/server/requirements/ci.txt index 9a171226abf..2c6d577c042 100644 --- a/services/web/server/requirements/ci.txt +++ b/services/web/server/requirements/ci.txt @@ -11,6 +11,7 @@ --requirement _test.txt # installs this repo's packages +simcore-common-library @ ../../../packages/common-library simcore-models-library @ ../../../packages/models-library simcore-postgres-database @ ../../../packages/postgres-database simcore-settings-library @ 
../../../packages/settings-library diff --git a/services/web/server/requirements/dev.txt b/services/web/server/requirements/dev.txt index b62c7127482..fdc9cb27429 100644 --- a/services/web/server/requirements/dev.txt +++ b/services/web/server/requirements/dev.txt @@ -12,6 +12,7 @@ --requirement _tools.txt # installs this repo's packages +--editable ../../../packages/common-library/ --editable ../../../packages/models-library/ --editable ../../../packages/postgres-database/ --editable ../../../packages/settings-library/ diff --git a/services/web/server/requirements/prod.txt b/services/web/server/requirements/prod.txt index 9494dd12c30..2ccad765e49 100644 --- a/services/web/server/requirements/prod.txt +++ b/services/web/server/requirements/prod.txt @@ -10,6 +10,7 @@ --requirement _base.txt # installs this repo's packages +simcore-common-library @ ../../../packages/common-library simcore-models-library @ ../../../packages/models-library simcore-postgres-database @ ../../../packages/postgres-database simcore-settings-library @ ../../../packages/settings-library From 3cb4ea5dd41bfb54392e759a38fe9378fe584d43 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 8 Oct 2024 14:19:15 +0200 Subject: [PATCH 257/280] add typeadapter --- .../datcore-adapter/tests/unit/test_route_datasets.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/services/datcore-adapter/tests/unit/test_route_datasets.py b/services/datcore-adapter/tests/unit/test_route_datasets.py index 2c9c98b20f4..559e43cc0ed 100644 --- a/services/datcore-adapter/tests/unit/test_route_datasets.py +++ b/services/datcore-adapter/tests/unit/test_route_datasets.py @@ -8,7 +8,7 @@ import httpx import respx from fastapi_pagination import Page -from pydantic import parse_obj_as +from pydantic import TypeAdapter from simcore_service_datcore_adapter.models.schemas.datasets import ( DatasetMetaData, FileMetaData, @@ -29,7 +29,7 @@ async def test_list_datasets_entrypoint( assert response.status_code == status.HTTP_200_OK data = response.json() assert data - parse_obj_as(Page[DatasetMetaData], data) + TypeAdapter(Page[DatasetMetaData]).validate_python(data) async def test_list_dataset_files_legacy_entrypoint( @@ -47,7 +47,7 @@ async def test_list_dataset_files_legacy_entrypoint( assert response.status_code == status.HTTP_200_OK data = response.json() assert data - parse_obj_as(list[FileMetaData], data) + TypeAdapter(list[FileMetaData]).validate_python(data) async def test_list_dataset_top_level_files_entrypoint( @@ -65,7 +65,7 @@ async def test_list_dataset_top_level_files_entrypoint( assert response.status_code == status.HTTP_200_OK data = response.json() assert data - parse_obj_as(Page[FileMetaData], data) + TypeAdapter(Page[FileMetaData]).validate_python(data) async def test_list_dataset_collection_files_entrypoint( @@ -85,4 +85,4 @@ async def test_list_dataset_collection_files_entrypoint( assert response.status_code == status.HTTP_200_OK data = response.json() assert data - parse_obj_as(Page[FileMetaData], data) + TypeAdapter(Page[FileMetaData]).validate_python(data) From e36b18c93a31db9e44686eb59a1d70456c9f5117 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 8 Oct 2024 15:38:47 +0200 Subject: [PATCH 258/280] move changes --- packages/aws-library/requirements/ci.txt | 3 +- packages/aws-library/requirements/dev.txt | 3 +- .../src/aws_library/ec2/__init__.py | 8 +++ .../src/aws_library/ec2/_models.py | 10 ++-- packages/common-library/requirements/_test.in | 1 + .../common-library/requirements/_test.txt | 23 
+++++++- .../common-library/requirements/_tools.txt | 2 +- .../pydantic_networks_extension.py | 14 ++++- .../src/common_library/serialization.py | 4 ++ .../tests/test_errors_classes.py | 3 +- .../dynamic_services_service.py | 4 +- .../api_schemas_webserver/projects_nodes.py | 2 +- .../src/models_library/projects_nodes_io.py | 4 ++ .../postgres-database/requirements/_base.in | 1 + .../postgres-database/requirements/ci.txt | 4 +- .../postgres-database/requirements/dev.txt | 5 +- .../helpers/httpx_client_base_dev.py | 14 +++-- .../src/servicelib/rabbitmq/_errors.py | 4 +- .../dynamic_scheduler/errors.py | 4 +- .../resource_usage_tracker/errors.py | 4 +- .../deferred_tasks/test_deferred_tasks.py | 2 +- .../tests/fastapi/test_http_client_thin.py | 4 +- .../tests/fastapi/test_openapi.py | 2 +- .../src/settings_library/base.py | 8 +-- .../src/settings_library/utils_cli.py | 2 +- .../tests/test__pydantic_settings.py | 2 +- .../simcore_sdk/node_ports_v2/nodeports_v2.py | 22 ++++++-- .../unit/test_node_ports_v2_nodeports_v2.py | 53 +++++++++++++++++++ 28 files changed, 169 insertions(+), 43 deletions(-) diff --git a/packages/aws-library/requirements/ci.txt b/packages/aws-library/requirements/ci.txt index 7fcd69a010f..f39ca4e3672 100644 --- a/packages/aws-library/requirements/ci.txt +++ b/packages/aws-library/requirements/ci.txt @@ -11,8 +11,9 @@ --requirement _test.txt # installs this repo's packages +simcore-common-library @ ../common-library +simcore-models-library @ ../models-library/ pytest-simcore @ ../pytest-simcore -simcore-models-library @ ../models-library simcore-service-library @ ../service-library/ simcore-settings-library @ ../settings-library/ diff --git a/packages/aws-library/requirements/dev.txt b/packages/aws-library/requirements/dev.txt index f8956725407..34cc644b370 100644 --- a/packages/aws-library/requirements/dev.txt +++ b/packages/aws-library/requirements/dev.txt @@ -12,8 +12,9 @@ --requirement _tools.txt # installs this repo's packages ---editable ../pytest-simcore +--editable ../common-library/ --editable ../models-library/ +--editable ../pytest-simcore/ --editable ../service-library/ --editable ../settings-library/ diff --git a/packages/aws-library/src/aws_library/ec2/__init__.py b/packages/aws-library/src/aws_library/ec2/__init__.py index 02fcf10b00e..112c70861b2 100644 --- a/packages/aws-library/src/aws_library/ec2/__init__.py +++ b/packages/aws-library/src/aws_library/ec2/__init__.py @@ -1,6 +1,10 @@ from ._client import SimcoreEC2API from ._errors import EC2AccessError, EC2NotConnectedError, EC2RuntimeError from ._models import ( + AWS_TAG_KEY_MAX_LENGTH, + AWS_TAG_KEY_MIN_LENGTH, + AWS_TAG_VALUE_MAX_LENGTH, + AWS_TAG_VALUE_MIN_LENGTH, AWSTagKey, AWSTagValue, EC2InstanceBootSpecific, @@ -14,6 +18,10 @@ __all__: tuple[str, ...] 
= ( "AWSTagKey", "AWSTagValue", + "AWS_TAG_KEY_MIN_LENGTH", + "AWS_TAG_KEY_MAX_LENGTH", + "AWS_TAG_VALUE_MIN_LENGTH", + "AWS_TAG_VALUE_MAX_LENGTH", "EC2AccessError", "EC2InstanceBootSpecific", "EC2InstanceConfig", diff --git a/packages/aws-library/src/aws_library/ec2/_models.py b/packages/aws-library/src/aws_library/ec2/_models.py index d1ff9cb3cc9..ad15a74f0eb 100644 --- a/packages/aws-library/src/aws_library/ec2/_models.py +++ b/packages/aws-library/src/aws_library/ec2/_models.py @@ -2,7 +2,7 @@ import re import tempfile from dataclasses import dataclass -from typing import Annotated, TypeAlias +from typing import Annotated, Final, TypeAlias import sh # type: ignore[import-untyped] from models_library.docker import DockerGenericTag @@ -68,17 +68,21 @@ class EC2InstanceType: InstancePrivateDNSName: TypeAlias = str +AWS_TAG_KEY_MIN_LENGTH: Final[int] = 1 +AWS_TAG_KEY_MAX_LENGTH: Final[int] = 128 AWSTagKey: TypeAlias = Annotated[ # see [https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/Using_Tags.html#tag-restrictions] str, StringConstraints( - min_length=1, - max_length=128, + min_length=AWS_TAG_KEY_MIN_LENGTH, + max_length=AWS_TAG_KEY_MAX_LENGTH, pattern=re.compile(r"^(?!(_index|\.{1,2})$)[a-zA-Z0-9\+\-=\._:@]+$"), ), ] +AWS_TAG_VALUE_MIN_LENGTH: Final[int] = 0 +AWS_TAG_VALUE_MAX_LENGTH: Final[int] = 256 AWSTagValue: TypeAlias = Annotated[ # see [https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/Using_Tags.html#tag-restrictions] # quotes []{} were added as it allows to json encode. it seems to be accepted as a value diff --git a/packages/common-library/requirements/_test.in b/packages/common-library/requirements/_test.in index 4454d79d36a..1fe37ac0151 100644 --- a/packages/common-library/requirements/_test.in +++ b/packages/common-library/requirements/_test.in @@ -10,6 +10,7 @@ coverage faker +pydantic-settings pytest pytest-asyncio pytest-cov diff --git a/packages/common-library/requirements/_test.txt b/packages/common-library/requirements/_test.txt index 2354abd790d..89b9a19eca6 100644 --- a/packages/common-library/requirements/_test.txt +++ b/packages/common-library/requirements/_test.txt @@ -1,8 +1,12 @@ +annotated-types==0.7.0 + # via + # -c requirements/_base.txt + # pydantic coverage==7.6.1 # via # -r requirements/_test.in # pytest-cov -faker==30.1.0 +faker==30.3.0 # via -r requirements/_test.in icdiff==2.0.7 # via pytest-icdiff @@ -16,6 +20,17 @@ pluggy==1.5.0 # via pytest pprintpp==0.4.0 # via pytest-icdiff +pydantic==2.9.2 + # via + # -c requirements/../../../requirements/constraints.txt + # -c requirements/_base.txt + # pydantic-settings +pydantic-core==2.23.4 + # via + # -c requirements/_base.txt + # pydantic +pydantic-settings==2.5.2 + # via -r requirements/_test.in pytest==8.3.3 # via # -r requirements/_test.in @@ -44,7 +59,9 @@ pytest-sugar==1.0.0 python-dateutil==2.9.0.post0 # via faker python-dotenv==1.0.1 - # via -r requirements/_test.in + # via + # -r requirements/_test.in + # pydantic-settings six==1.16.0 # via python-dateutil termcolor==2.5.0 @@ -53,3 +70,5 @@ typing-extensions==4.12.2 # via # -c requirements/_base.txt # faker + # pydantic + # pydantic-core diff --git a/packages/common-library/requirements/_tools.txt b/packages/common-library/requirements/_tools.txt index a333bb822ae..b5f85d4efcc 100644 --- a/packages/common-library/requirements/_tools.txt +++ b/packages/common-library/requirements/_tools.txt @@ -1,6 +1,6 @@ astroid==3.3.5 # via pylint -black==24.8.0 +black==24.10.0 # via -r requirements/../../../requirements/devenv.txt build==1.2.2.post1 # via 
pip-tools diff --git a/packages/common-library/src/common_library/pydantic_networks_extension.py b/packages/common-library/src/common_library/pydantic_networks_extension.py index b53a2bfc8ae..8d269f3fbb3 100644 --- a/packages/common-library/src/common_library/pydantic_networks_extension.py +++ b/packages/common-library/src/common_library/pydantic_networks_extension.py @@ -1,5 +1,15 @@ from typing import Annotated, TypeAlias -from pydantic import AfterValidator, AnyHttpUrl +from pydantic import AfterValidator, AnyHttpUrl, HttpUrl -AnyHttpUrlLegacy: TypeAlias = Annotated[str, AnyHttpUrl, AfterValidator(lambda u: u.rstrip("/"))] + +def _strip_last_slash(url: str) -> str: + return url.rstrip("/") + + +AnyHttpUrlLegacy: TypeAlias = Annotated[ + str, AnyHttpUrl, AfterValidator(_strip_last_slash) +] + + +HttpUrlLegacy: TypeAlias = Annotated[str, HttpUrl, AfterValidator(_strip_last_slash)] diff --git a/packages/common-library/src/common_library/serialization.py b/packages/common-library/src/common_library/serialization.py index 510bdf6a469..a2178cc3ae6 100644 --- a/packages/common-library/src/common_library/serialization.py +++ b/packages/common-library/src/common_library/serialization.py @@ -1,3 +1,4 @@ +from datetime import timedelta from typing import Any from common_library.pydantic_fields_extension import get_type @@ -15,6 +16,9 @@ def model_dump_with_secrets( field_data = data[field_name] + if isinstance(field_data, timedelta): + data[field_name] = field_data.total_seconds() + if isinstance(field_data, SecretStr): if show_secrets: data[field_name] = field_data.get_secret_value() diff --git a/packages/common-library/tests/test_errors_classes.py b/packages/common-library/tests/test_errors_classes.py index ae0ed8c1e3d..63674fbd3b4 100644 --- a/packages/common-library/tests/test_errors_classes.py +++ b/packages/common-library/tests/test_errors_classes.py @@ -38,8 +38,7 @@ class B12(B1, ValueError): def test_error_codes_and_msg_template(): class MyBaseError(OsparcErrorMixin, Exception): - def __init__(self, **ctx: Any) -> None: - super().__init__(**ctx) # Do not forget this for base exceptions! + pass class MyValueError(MyBaseError, ValueError): msg_template = "Wrong value {value}" diff --git a/packages/models-library/src/models_library/api_schemas_directorv2/dynamic_services_service.py b/packages/models-library/src/models_library/api_schemas_directorv2/dynamic_services_service.py index f0958695e15..d103a3ea8c5 100644 --- a/packages/models-library/src/models_library/api_schemas_directorv2/dynamic_services_service.py +++ b/packages/models-library/src/models_library/api_schemas_directorv2/dynamic_services_service.py @@ -33,7 +33,7 @@ class CommonServiceDetails(BaseModel): class ServiceDetails(CommonServiceDetails): - basepath: Path = Field( + basepath: Path | None = Field( default=None, description="predefined path where the dynamic service should be served. 
If empty, the service shall use the root endpoint.", alias="service_basepath", @@ -68,7 +68,7 @@ class RunningDynamicServiceDetails(ServiceDetails): internal_port: PortInt = Field( ..., description="the service swarm internal port", alias="service_port" ) - published_port: PortInt = Field( + published_port: PortInt | None = Field( default=None, description="the service swarm published port if any", deprecated=True, diff --git a/packages/models-library/src/models_library/api_schemas_webserver/projects_nodes.py b/packages/models-library/src/models_library/api_schemas_webserver/projects_nodes.py index cda166e0d13..02fabd46f7a 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/projects_nodes.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/projects_nodes.py @@ -103,7 +103,7 @@ class NodeGet(OutputSchema): "service_basepath": "/x/E1O2E-LAH", "service_state": "pending", "service_message": "no suitable node (insufficient resources on 1 node)", - "user_id": 123, + "user_id": "123", } } ) diff --git a/packages/models-library/src/models_library/projects_nodes_io.py b/packages/models-library/src/models_library/projects_nodes_io.py index 412a6927462..bee43a4b3dc 100644 --- a/packages/models-library/src/models_library/projects_nodes_io.py +++ b/packages/models-library/src/models_library/projects_nodes_io.py @@ -176,6 +176,10 @@ def legacy_enforce_str_to_int(cls, v): return int(v) return v + model_config = ConfigDict( + populate_by_name=True + ) + class SimCoreFileLink(BaseFileLink): """I/O port type to hold a link to a file in simcore S3 storage""" diff --git a/packages/postgres-database/requirements/_base.in b/packages/postgres-database/requirements/_base.in index 48679f44663..0294edf9114 100644 --- a/packages/postgres-database/requirements/_base.in +++ b/packages/postgres-database/requirements/_base.in @@ -3,6 +3,7 @@ # --constraint ../../../requirements/constraints.txt --constraint ./constraints.txt +--requirement ../../../packages/common-library/requirements/_base.in alembic pydantic diff --git a/packages/postgres-database/requirements/ci.txt b/packages/postgres-database/requirements/ci.txt index ffac2669760..8df1aecf670 100644 --- a/packages/postgres-database/requirements/ci.txt +++ b/packages/postgres-database/requirements/ci.txt @@ -12,8 +12,8 @@ --requirement _test.txt # installs this repo's packages -pytest-simcore @ ../../packages/pytest-simcore/ -simcore-models-library @ ../models-library +simcore-common-library @ ../common-library/ +pytest-simcore @ ../pytest-simcore/ # current module simcore-postgres-database @ . diff --git a/packages/postgres-database/requirements/dev.txt b/packages/postgres-database/requirements/dev.txt index a05947becdf..095f8383b2a 100644 --- a/packages/postgres-database/requirements/dev.txt +++ b/packages/postgres-database/requirements/dev.txt @@ -13,8 +13,9 @@ --requirement _tools.txt # installs this repo's packages ---editable ../../packages/pytest-simcore/ ---editable ../models-library/ +--editable ../common-library/ +--editable ../pytest-simcore/ + # current module --editable . 
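Note on the dynamic_services_service.py hunks above: `basepath` and `published_port` are widened to `X | None` because Pydantic v2 dropped v1's implicit-Optional rule for fields declared with `default=None`. A minimal sketch of the difference follows; it is not part of the patch, and the model and field names are illustrative rather than taken from the repository:

from pathlib import Path
from pydantic import BaseModel, Field, ValidationError

class V1StyleDetails(BaseModel):
    # Pydantic v1 silently treated this as Optional[Path]; v2 keeps the annotation as-is
    basepath: Path = Field(default=None)

class V2StyleDetails(BaseModel):
    # explicit `| None` is now required for the field to accept None as an input
    basepath: Path | None = Field(default=None)

V2StyleDetails(basepath=None)          # validates fine
try:
    V1StyleDetails(basepath=None)      # rejected by Pydantic v2: None is not a valid Path
except ValidationError as err:
    print(err.error_count(), "validation error")
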
diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/httpx_client_base_dev.py b/packages/pytest-simcore/src/pytest_simcore/helpers/httpx_client_base_dev.py index d9b5bb64437..9a36d4cc020 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/httpx_client_base_dev.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/httpx_client_base_dev.py @@ -6,7 +6,7 @@ from fastapi.encoders import jsonable_encoder from httpx._types import URLTypes from jsonschema import ValidationError -from pydantic import parse_file_as +from pydantic import TypeAdapter from .httpx_calls_capture_errors import CaptureProcessingError from .httpx_calls_capture_models import HttpApiCallCaptureModel, get_captured_model @@ -14,6 +14,11 @@ _logger = logging.getLogger(__name__) +_HTTP_API_CALL_CAPTURE_MODEL_ADAPTER: TypeAdapter[ + list[HttpApiCallCaptureModel] +] = TypeAdapter(list[HttpApiCallCaptureModel]) + + class AsyncClientCaptureWrapper(httpx.AsyncClient): """ Adds captures mechanism @@ -41,8 +46,11 @@ async def request(self, method: str, url: URLTypes, **kwargs): or self._capture_file.read_text().strip() == "" ): self._capture_file.write_text("[]") - serialized_captures: list[HttpApiCallCaptureModel] = parse_file_as( - list[HttpApiCallCaptureModel], self._capture_file + + serialized_captures: list[ + HttpApiCallCaptureModel + ] = _HTTP_API_CALL_CAPTURE_MODEL_ADAPTER.validate_json( + self._capture_file.read_text() ) serialized_captures.append(capture) self._capture_file.write_text( diff --git a/packages/service-library/src/servicelib/rabbitmq/_errors.py b/packages/service-library/src/servicelib/rabbitmq/_errors.py index 105345efe9b..c105c2b8ff3 100644 --- a/packages/service-library/src/servicelib/rabbitmq/_errors.py +++ b/packages/service-library/src/servicelib/rabbitmq/_errors.py @@ -10,12 +10,12 @@ class BaseRPCError(OsparcErrorMixin, RuntimeError): class RPCNotInitializedError(BaseRPCError): - code = f"{_ERROR_PREFIX}.not_started" + code = f"{_ERROR_PREFIX}.not_started" # type: ignore[assignment] msg_template = "Please check that the RabbitMQ RPC backend was initialized!" class RemoteMethodNotRegisteredError(BaseRPCError): - code = f"{_ERROR_PREFIX}.remote_not_registered" + code = f"{_ERROR_PREFIX}.remote_not_registered" # type: ignore[assignment] msg_template = ( "Could not find a remote method named: '{method_name}'. " "Message from remote server was returned: {incoming_message}. " diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_scheduler/errors.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_scheduler/errors.py index 5e104db333c..6d7bf2a722c 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_scheduler/errors.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_scheduler/errors.py @@ -1,7 +1,7 @@ -from pydantic.errors import PydanticErrorMixin +from common_library.errors_classes import OsparcErrorMixin -class BaseDynamicSchedulerRPCError(PydanticErrorMixin, Exception): +class BaseDynamicSchedulerRPCError(OsparcErrorMixin, Exception): ... 
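`pydantic.errors.PydanticErrorMixin` no longer exists in Pydantic v2, so the RPC error base above (and the resource-usage-tracker one in the next hunk) switches to the in-repo `OsparcErrorMixin`. A minimal usage sketch, assuming the mixin keeps the keyword-to-`msg_template` behaviour exercised by the test_errors_classes.py hunk earlier in this patch; the error class name below is illustrative:

from common_library.errors_classes import OsparcErrorMixin

class MyRPCError(OsparcErrorMixin, RuntimeError):
    msg_template = "Wrong value {value}"

try:
    raise MyRPCError(value=42)
except MyRPCError as exc:
    # the message is rendered from msg_template and the keyword arguments
    print(exc)
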
diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/errors.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/errors.py index 44549841802..f9c1a24f406 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/errors.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/errors.py @@ -1,7 +1,7 @@ -from pydantic.errors import PydanticErrorMixin +from common_library.errors_classes import OsparcErrorMixin -class ResourceUsageTrackerRuntimeError(PydanticErrorMixin, RuntimeError): +class ResourceUsageTrackerRuntimeError(OsparcErrorMixin, RuntimeError): msg_template: str = "Resource-usage-tracker unexpected error" diff --git a/packages/service-library/tests/deferred_tasks/test_deferred_tasks.py b/packages/service-library/tests/deferred_tasks/test_deferred_tasks.py index 44a0a9379ed..9ea22f87ed1 100644 --- a/packages/service-library/tests/deferred_tasks/test_deferred_tasks.py +++ b/packages/service-library/tests/deferred_tasks/test_deferred_tasks.py @@ -16,8 +16,8 @@ import psutil import pytest from aiohttp.test_utils import unused_port +from common_library.serialization import model_dump_with_secrets from models_library.utils.json_serialization import json_dumps -from models_library.utils.serialization import model_dump_with_secrets from pydantic import NonNegativeFloat, NonNegativeInt from pytest_mock import MockerFixture from servicelib import redis as servicelib_redis diff --git a/packages/service-library/tests/fastapi/test_http_client_thin.py b/packages/service-library/tests/fastapi/test_http_client_thin.py index dfe11467518..7bd96b25eee 100644 --- a/packages/service-library/tests/fastapi/test_http_client_thin.py +++ b/packages/service-library/tests/fastapi/test_http_client_thin.py @@ -3,6 +3,7 @@ import logging from collections.abc import AsyncIterable, Iterable from typing import Final +from common_library.pydantic_networks_extension import AnyHttpUrlLegacy import arrow import pytest @@ -77,7 +78,8 @@ async def thick_client(request_timeout: int) -> AsyncIterable[FakeThickClient]: @pytest.fixture def test_url() -> str: - return str(TypeAdapter(AnyHttpUrl).validate_python("http://missing-host:1111")) + url =TypeAdapter(AnyHttpUrlLegacy).validate_python("http://missing-host:1111") + return f"{url}" async def test_connection_error( diff --git a/packages/service-library/tests/fastapi/test_openapi.py b/packages/service-library/tests/fastapi/test_openapi.py index 0edd438c73a..54f7e017799 100644 --- a/packages/service-library/tests/fastapi/test_openapi.py +++ b/packages/service-library/tests/fastapi/test_openapi.py @@ -44,7 +44,7 @@ def test_exclusive_min_openapi_issue(app: FastAPI): def test_overriding_openapi_method(app: FastAPI): assert not hasattr(app, "_original_openapi") - assert app.openapi.__doc__ is None + #assert app.openapi.__doc__ is None # PC why was this set to check that it is none? 
it's coming from the base fastapi applicaiton and now they provide some docs override_fastapi_openapi_method(app) diff --git a/packages/settings-library/src/settings_library/base.py b/packages/settings-library/src/settings_library/base.py index 6e7bd0d7b3b..c3f0e103e7a 100644 --- a/packages/settings-library/src/settings_library/base.py +++ b/packages/settings-library/src/settings_library/base.py @@ -2,11 +2,7 @@ from functools import cached_property from typing import Any, Final, get_origin -from common_library.utils.pydantic_fields_extension import ( - get_type, - is_literal, - is_nullable, -) +from common_library.pydantic_fields_extension import get_type, is_literal, is_nullable from pydantic import ValidationInfo, field_validator from pydantic.fields import FieldInfo from pydantic_core import PydanticUndefined, ValidationError @@ -44,7 +40,7 @@ def _default_factory(): field_name, ) return None - + _logger.warning("Validation errors=%s", err.errors()) raise DefaultFromEnvFactoryError(errors=err.errors()) from err return _default_factory diff --git a/packages/settings-library/src/settings_library/utils_cli.py b/packages/settings-library/src/settings_library/utils_cli.py index 4bdaf5c0e9c..2c1ab37116d 100644 --- a/packages/settings-library/src/settings_library/utils_cli.py +++ b/packages/settings-library/src/settings_library/utils_cli.py @@ -7,7 +7,7 @@ import rich import typer -from models_library.utils.serialization import model_dump_with_secrets +from common_library.serialization import model_dump_with_secrets from pydantic import ValidationError from pydantic_settings import BaseSettings diff --git a/packages/settings-library/tests/test__pydantic_settings.py b/packages/settings-library/tests/test__pydantic_settings.py index d42b5ad8586..bdc536387fc 100644 --- a/packages/settings-library/tests/test__pydantic_settings.py +++ b/packages/settings-library/tests/test__pydantic_settings.py @@ -12,10 +12,10 @@ """ +from common_library.pydantic_fields_extension import is_nullable from pydantic import ValidationInfo, field_validator from pydantic.fields import PydanticUndefined from pydantic_settings import BaseSettings -from common_library.utils.pydantic_fields_extension import is_nullable def assert_field_specs( diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py index edf24e6e297..d0ef9eb14bf 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py @@ -1,8 +1,12 @@ +from asyncio import Task +import traceback import logging from collections.abc import Callable, Coroutine from pathlib import Path from typing import Any +from pydantic_core import InitErrorDetails + from models_library.api_schemas_storage import LinkType from models_library.basic_types import IDStr from models_library.projects import ProjectIDStr @@ -26,6 +30,16 @@ log = logging.getLogger(__name__) +def _format_error(task:Task)-> str: + # pylint:disable=protected-access + assert task._exception #nosec + error_list= traceback.format_exception(type(task._exception), task._exception, task._exception.__traceback__) + return "\n".join(error_list) + +def _get_error_details(task:Task, port_key:str)->InitErrorDetails: + # pylint:disable=protected-access + return InitErrorDetails(type="value_error", loc=(f"{port_key}",), input=_format_error(task), ctx={"error":task._exception}) + class Nodeports(BaseModel): """ Represents a node in a project and all 
its input/output ports @@ -180,9 +194,9 @@ async def set_multiple( await self.save_to_db_cb(self) # groups all ValidationErrors pre-pending 'port_key' to loc and raises ValidationError - if errors := [ - list(flatten_errors([r], self.__config__, loc=(f"{port_key}",))) + if error_details:= [ + _get_error_details(r, port_key) for port_key, r in zip(port_values.keys(), results) - if isinstance(r, ValidationError) + if r is not None ]: - raise ValidationError(errors, model=type(self)) + raise ValidationError.from_exception_data(title="Multiple port_key errors",line_errors=error_details) diff --git a/packages/simcore-sdk/tests/unit/test_node_ports_v2_nodeports_v2.py b/packages/simcore-sdk/tests/unit/test_node_ports_v2_nodeports_v2.py index 1bc1fcde664..856b4b268b1 100644 --- a/packages/simcore-sdk/tests/unit/test_node_ports_v2_nodeports_v2.py +++ b/packages/simcore-sdk/tests/unit/test_node_ports_v2_nodeports_v2.py @@ -3,9 +3,11 @@ # pylint:disable=redefined-outer-name # pylint:disable=protected-access +import asyncio from pathlib import Path from typing import Any, Callable +from pydantic import ValidationError import pytest from faker import Faker from pytest_mock import MockFixture @@ -221,3 +223,54 @@ async def test_node_ports_v2_packages( db_manager = mock_db_manager(default_configuration) node_ports = await ports(user_id, project_id, node_uuid) node_ports = await ports(user_id, project_id, node_uuid, db_manager=db_manager) + + +@pytest.fixture +def mock_port_set(mocker: MockFixture)->None: + async def _always_raise_error(*args, **kwargs): + async def _i_raise_errors(): + raise ValidationError("invalid") + return asyncio.create_task(_i_raise_errors()) + + mocker.patch("simcore_sdk.node_ports_v2.port.Port._set", side_effect=_always_raise_error) + +async def test_node_ports_v2_set_multiple_catch_multiple_failing_set_ports( + mock_port_set:None, + mock_db_manager: Callable, + default_configuration: dict[str, Any], + user_id: int, + project_id: str, + node_uuid: str, + faker: Faker, +): + db_manager = mock_db_manager(default_configuration) + + original_inputs = create_valid_port_mapping(InputsList, suffix="original") + original_outputs = create_valid_port_mapping(OutputsList, suffix="original") + + + async def _mock_callback(*args,**kwargs): + pass + + node_ports = Nodeports( + inputs=original_inputs, + outputs=original_outputs, + db_manager=db_manager, + user_id=user_id, + project_id=project_id, + node_uuid=node_uuid, + io_log_redirect_cb=None, + save_to_db_cb=_mock_callback, + node_port_creator_cb=_mock_callback, + auto_update=False, + ) + async with ProgressBarData(num_steps=1, description=faker.pystr()) as progress_bar: + with pytest.raises(ValidationError): + await node_ports.set_multiple( + { + port.key: (port.value, None) + for port in list(original_inputs.values()) + + list(original_outputs.values()) + }, + progress_bar=progress_bar, + ) From fdd19c60b179b884f5c63e295f2740f6c52ab681 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 8 Oct 2024 16:22:03 +0200 Subject: [PATCH 259/280] move base type adapters --- .../aws-library/src/aws_library/s3/_client.py | 13 +++++++------ .../pydantic_networks_extension.py | 18 +++++++++++++++--- .../common_library/pydantic_type_adapters.py | 8 ++++++++ .../src/models_library/projects_nodes_io.py | 13 ++++--------- .../src/models_library/rest_pagination.py | 6 ++---- .../models_library/rest_pagination_utils.py | 13 ++++++------- .../aiohttp/long_running_tasks/_server.py | 11 +++++------ .../fastapi/long_running_tasks/_client.py | 7 +++---- 
.../src/settings_library/ssm.py | 9 ++++----- 9 files changed, 54 insertions(+), 44 deletions(-) create mode 100644 packages/common-library/src/common_library/pydantic_type_adapters.py diff --git a/packages/aws-library/src/aws_library/s3/_client.py b/packages/aws-library/src/aws_library/s3/_client.py index b2ae33cc9b4..716ee3ca37b 100644 --- a/packages/aws-library/src/aws_library/s3/_client.py +++ b/packages/aws-library/src/aws_library/s3/_client.py @@ -13,6 +13,7 @@ from boto3.s3.transfer import TransferConfig from botocore import exceptions as botocore_exc from botocore.client import Config +from common_library.pydantic_type_adapters import AnyUrlLegacyAdapter from models_library.api_schemas_storage import ETag, S3BucketName, UploadedPart from models_library.basic_types import SHA256Str from pydantic import AnyUrl, ByteSize, TypeAdapter @@ -43,8 +44,8 @@ _MAX_CONCURRENT_COPY: Final[int] = 4 _AWS_MAX_ITEMS_PER_PAGE: Final[int] = 1000 -_ANY_URL_ADAPTER: Final[TypeAdapter[AnyUrl]] = TypeAdapter(AnyUrl) -_LIST_ANY_URL_ADAPTER: Final[TypeAdapter[list[AnyUrl]]] = TypeAdapter(list[AnyUrl]) + +ListAnyUrlTypeAdapter: Final[TypeAdapter[list[AnyUrl]]] = TypeAdapter(list[AnyUrl]) class UploadedBytesTransferredCallback(Protocol): @@ -263,7 +264,7 @@ async def create_single_presigned_download_link( Params={"Bucket": bucket, "Key": object_key}, ExpiresIn=expiration_secs, ) - return _ANY_URL_ADAPTER.validate_python(generated_link) + return AnyUrlLegacyAdapter.validate_python(generated_link) @s3_exception_handler(_logger) async def create_single_presigned_upload_link( @@ -276,7 +277,7 @@ async def create_single_presigned_upload_link( Params={"Bucket": bucket, "Key": object_key}, ExpiresIn=expiration_secs, ) - return _ANY_URL_ADAPTER.validate_python(generated_link) + return AnyUrlLegacyAdapter.validate_python(generated_link) @s3_exception_handler(_logger) async def create_multipart_upload_links( @@ -299,7 +300,7 @@ async def create_multipart_upload_links( # compute the number of links, based on the announced file size num_upload_links, chunk_size = compute_num_file_chunks(file_size) # now create the links - upload_links = _LIST_ANY_URL_ADAPTER.validate_python( + upload_links = ListAnyUrlTypeAdapter.validate_python( await asyncio.gather( *( self._client.generate_presigned_url( @@ -473,6 +474,6 @@ def is_multipart(file_size: ByteSize) -> bool: @staticmethod def compute_s3_url(*, bucket: S3BucketName, object_key: S3ObjectKey) -> AnyUrl: - return _ANY_URL_ADAPTER.validate_python( + return AnyUrlLegacyAdapter.validate_python( f"s3://{bucket}/{urllib.parse.quote(object_key)}" ) diff --git a/packages/common-library/src/common_library/pydantic_networks_extension.py b/packages/common-library/src/common_library/pydantic_networks_extension.py index 8d269f3fbb3..d98ea73b53c 100644 --- a/packages/common-library/src/common_library/pydantic_networks_extension.py +++ b/packages/common-library/src/common_library/pydantic_networks_extension.py @@ -1,15 +1,27 @@ from typing import Annotated, TypeAlias -from pydantic import AfterValidator, AnyHttpUrl, HttpUrl +from pydantic import AfterValidator, AnyHttpUrl, AnyUrl, HttpUrl def _strip_last_slash(url: str) -> str: return url.rstrip("/") +AnyUrlLegacy: TypeAlias = Annotated[ + str, + AnyUrl, + AfterValidator(_strip_last_slash), +] + AnyHttpUrlLegacy: TypeAlias = Annotated[ - str, AnyHttpUrl, AfterValidator(_strip_last_slash) + str, + AnyHttpUrl, + AfterValidator(_strip_last_slash), ] -HttpUrlLegacy: TypeAlias = Annotated[str, HttpUrl, AfterValidator(_strip_last_slash)] 
+HttpUrlLegacy: TypeAlias = Annotated[ + str, + HttpUrl, + AfterValidator(_strip_last_slash), +] diff --git a/packages/common-library/src/common_library/pydantic_type_adapters.py b/packages/common-library/src/common_library/pydantic_type_adapters.py new file mode 100644 index 00000000000..b9db9e6186b --- /dev/null +++ b/packages/common-library/src/common_library/pydantic_type_adapters.py @@ -0,0 +1,8 @@ +from typing import Final + +from common_library.pydantic_networks_extension import AnyHttpUrlLegacy, AnyUrlLegacy +from pydantic import TypeAdapter + +AnyUrlLegacyAdapter: Final[TypeAdapter[AnyUrlLegacy]] = TypeAdapter(AnyUrlLegacy) + +AnyHttpUrlLegacyAdapter: Final[TypeAdapter] = TypeAdapter(AnyHttpUrlLegacy) diff --git a/packages/models-library/src/models_library/projects_nodes_io.py b/packages/models-library/src/models_library/projects_nodes_io.py index bee43a4b3dc..92e2e767572 100644 --- a/packages/models-library/src/models_library/projects_nodes_io.py +++ b/packages/models-library/src/models_library/projects_nodes_io.py @@ -7,12 +7,12 @@ """ from pathlib import Path -from typing import Annotated, Final, TypeAlias +from typing import Annotated, TypeAlias from uuid import UUID +from common_library.pydantic_type_adapters import AnyUrlLegacyAdapter from models_library.basic_types import ConstrainedStr, KeyIDStr from pydantic import ( - AnyUrl, BaseModel, BeforeValidator, ConfigDict, @@ -45,9 +45,6 @@ ] -_ANY_URL_ADAPTER: Final[TypeAdapter[AnyUrl]] = TypeAdapter(AnyUrl) - - class SimcoreS3DirectoryID(ConstrainedStr): """ A simcore directory has the following structure: @@ -126,7 +123,7 @@ class DownloadLink(BaseModel): """I/O port type to hold a generic download link to a file (e.g. S3 pre-signed link, etc)""" download_link: Annotated[ - str, BeforeValidator(lambda x: str(_ANY_URL_ADAPTER.validate_python(x))) + str, BeforeValidator(lambda x: str(AnyUrlLegacyAdapter.validate_python(x))) ] = Field(..., alias="downloadLink") label: str | None = Field(default=None, description="Display name") model_config = ConfigDict( @@ -176,9 +173,7 @@ def legacy_enforce_str_to_int(cls, v): return int(v) return v - model_config = ConfigDict( - populate_by_name=True - ) + model_config = ConfigDict(populate_by_name=True) class SimCoreFileLink(BaseFileLink): diff --git a/packages/models-library/src/models_library/rest_pagination.py b/packages/models-library/src/models_library/rest_pagination.py index 0163cebc322..63b4e4948b0 100644 --- a/packages/models-library/src/models_library/rest_pagination.py +++ b/packages/models-library/src/models_library/rest_pagination.py @@ -1,7 +1,7 @@ from typing import Annotated, Final, Generic, TypeAlias, TypeVar +from common_library.pydantic_type_adapters import AnyHttpUrlLegacyAdapter from pydantic import ( - AnyHttpUrl, BaseModel, BeforeValidator, ConfigDict, @@ -15,8 +15,6 @@ from .utils.common_validators import none_to_empty_list_pre_validator -_ANY_HTTP_URL_ADAPTER: Final[TypeAdapter[AnyHttpUrl]] = TypeAdapter(AnyHttpUrl) - # Default limit values # - Using same values across all pagination entrypoints simplifies # interconnecting paginated calls @@ -101,7 +99,7 @@ class PageLinks( PageRefs[ Annotated[ str, - BeforeValidator(lambda x: str(_ANY_HTTP_URL_ADAPTER.validate_python(x))), + BeforeValidator(lambda x: str(AnyHttpUrlLegacyAdapter.validate_python(x))), ] ] ): diff --git a/packages/models-library/src/models_library/rest_pagination_utils.py b/packages/models-library/src/models_library/rest_pagination_utils.py index 660b69ba303..c9ae6ed4167 100644 --- 
a/packages/models-library/src/models_library/rest_pagination_utils.py +++ b/packages/models-library/src/models_library/rest_pagination_utils.py @@ -1,7 +1,7 @@ from math import ceil from typing import Any, Protocol, TypedDict, Union, runtime_checkable -from pydantic import AnyHttpUrl, TypeAdapter +from common_library.pydantic_type_adapters import AnyHttpUrlLegacyAdapter from .rest_pagination import PageLinks, PageMetaInfoLimitOffset @@ -29,7 +29,6 @@ def replace_query_params(self, **kwargs: Any) -> "_StarletteURL": _URLType = Union[_YarlURL, _StarletteURL] -_ANY_HTTP_URL_ADAPTER: TypeAdapter = TypeAdapter(AnyHttpUrl) def _replace_query(url: _URLType, query: dict[str, Any]) -> str: @@ -73,21 +72,21 @@ def paginate_data( ), _links=PageLinks( self=( - _ANY_HTTP_URL_ADAPTER.validate_python( + AnyHttpUrlLegacyAdapter.validate_python( _replace_query(request_url, {"offset": offset, "limit": limit}), ) ), - first=_ANY_HTTP_URL_ADAPTER.validate_python( + first=AnyHttpUrlLegacyAdapter.validate_python( _replace_query(request_url, {"offset": 0, "limit": limit}) ), - prev=_ANY_HTTP_URL_ADAPTER.validate_python( + prev=AnyHttpUrlLegacyAdapter.validate_python( _replace_query( request_url, {"offset": max(offset - limit, 0), "limit": limit} ), ) if offset > 0 else None, - next=_ANY_HTTP_URL_ADAPTER.validate_python( + next=AnyHttpUrlLegacyAdapter.validate_python( _replace_query( request_url, {"offset": min(offset + limit, last_page * limit), "limit": limit}, @@ -95,7 +94,7 @@ def paginate_data( ) if offset < (last_page * limit) else None, - last=_ANY_HTTP_URL_ADAPTER.validate_python( + last=AnyHttpUrlLegacyAdapter.validate_python( _replace_query( request_url, {"offset": last_page * limit, "limit": limit} ), diff --git a/packages/service-library/src/servicelib/aiohttp/long_running_tasks/_server.py b/packages/service-library/src/servicelib/aiohttp/long_running_tasks/_server.py index 8e20e5950dd..bce905e9cd6 100644 --- a/packages/service-library/src/servicelib/aiohttp/long_running_tasks/_server.py +++ b/packages/service-library/src/servicelib/aiohttp/long_running_tasks/_server.py @@ -5,8 +5,9 @@ from typing import Any from aiohttp import web +from common_library.pydantic_type_adapters import AnyHttpUrlLegacyAdapter from models_library.utils.json_serialization import json_dumps -from pydantic import AnyHttpUrl, PositiveFloat, TypeAdapter +from pydantic import PositiveFloat from ...aiohttp import status from ...long_running_tasks._models import TaskGet @@ -28,8 +29,6 @@ _logger = logging.getLogger(__name__) -_ANY_HTTP_URL_ADAPTER: TypeAdapter[AnyHttpUrl] = TypeAdapter(AnyHttpUrl) - def no_ops_decorator(handler: Handler): return handler @@ -69,13 +68,13 @@ async def start_long_running_task( ip_addr, port = request_.transport.get_extra_info( "sockname" ) # https://docs.python.org/3/library/asyncio-protocol.html#asyncio.BaseTransport.get_extra_info - status_url = _ANY_HTTP_URL_ADAPTER.validate_python( + status_url = AnyHttpUrlLegacyAdapter.validate_python( f"http://{ip_addr}:{port}{request_.app.router['get_task_status'].url_for(task_id=task_id)}" # NOSONAR ) - result_url = _ANY_HTTP_URL_ADAPTER.validate_python( + result_url = AnyHttpUrlLegacyAdapter.validate_python( f"http://{ip_addr}:{port}{request_.app.router['get_task_result'].url_for(task_id=task_id)}" # NOSONAR ) - abort_url = _ANY_HTTP_URL_ADAPTER.validate_python( + abort_url = AnyHttpUrlLegacyAdapter.validate_python( f"http://{ip_addr}:{port}{request_.app.router['cancel_and_delete_task'].url_for(task_id=task_id)}" # NOSONAR ) task_get = TaskGet( diff --git 
a/packages/service-library/src/servicelib/fastapi/long_running_tasks/_client.py b/packages/service-library/src/servicelib/fastapi/long_running_tasks/_client.py index 8c2c578c67f..204658220a0 100644 --- a/packages/service-library/src/servicelib/fastapi/long_running_tasks/_client.py +++ b/packages/service-library/src/servicelib/fastapi/long_running_tasks/_client.py @@ -4,10 +4,10 @@ import warnings from typing import Any, Awaitable, Callable, Final -from common_library.pydantic_networks_extension import AnyHttpUrlLegacy +from common_library.pydantic_type_adapters import AnyHttpUrlLegacyAdapter from fastapi import FastAPI, status from httpx import AsyncClient, HTTPError -from pydantic import PositiveFloat, TypeAdapter +from pydantic import PositiveFloat from tenacity import RetryCallState from tenacity.asyncio import AsyncRetrying from tenacity.retry import retry_if_exception_type @@ -24,7 +24,6 @@ DEFAULT_HTTP_REQUESTS_TIMEOUT: Final[PositiveFloat] = 15 -_ANY_HTTP_URL_LEGACY_ADAPTER: TypeAdapter[AnyHttpUrlLegacy] = TypeAdapter(AnyHttpUrlLegacy) logger = logging.getLogger(__name__) @@ -132,7 +131,7 @@ def _client_configuration(self) -> ClientConfiguration: return output def _get_url(self, path: str) -> str: - return _ANY_HTTP_URL_LEGACY_ADAPTER.validate_python( + return AnyHttpUrlLegacyAdapter.validate_python( f"{self._base_url}{self._client_configuration.router_prefix}{path}", ) diff --git a/packages/settings-library/src/settings_library/ssm.py b/packages/settings-library/src/settings_library/ssm.py index 6bda70f2269..73bee409464 100644 --- a/packages/settings-library/src/settings_library/ssm.py +++ b/packages/settings-library/src/settings_library/ssm.py @@ -1,17 +1,16 @@ -from typing import Annotated, Final +from typing import Annotated -from pydantic import AnyHttpUrl, BeforeValidator, Field, SecretStr, TypeAdapter +from common_library.pydantic_type_adapters import AnyHttpUrlLegacyAdapter +from pydantic import BeforeValidator, Field, SecretStr from pydantic_settings import SettingsConfigDict from .base import BaseCustomSettings -_ANY_HTTP_URL_ADAPTER: Final[TypeAdapter] = TypeAdapter(AnyHttpUrl) - class SSMSettings(BaseCustomSettings): SSM_ACCESS_KEY_ID: SecretStr SSM_ENDPOINT: Annotated[ - str, BeforeValidator(lambda x: str(_ANY_HTTP_URL_ADAPTER.validate_python(x))) + str, BeforeValidator(lambda x: str(AnyHttpUrlLegacyAdapter.validate_python(x))) ] | None = Field(default=None, description="do not define if using standard AWS") SSM_REGION_NAME: str = "us-east-1" SSM_SECRET_ACCESS_KEY: SecretStr From c9135516be1e6f1de3ba75e2d6b56ea8c8d9ebf8 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 8 Oct 2024 16:44:58 +0200 Subject: [PATCH 260/280] fix test --- .../models-library/src/models_library/projects_nodes_io.py | 4 ++-- packages/simcore-sdk/tests/unit/test_node_ports_v2_links.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/models-library/src/models_library/projects_nodes_io.py b/packages/models-library/src/models_library/projects_nodes_io.py index 92e2e767572..3a79b6acf00 100644 --- a/packages/models-library/src/models_library/projects_nodes_io.py +++ b/packages/models-library/src/models_library/projects_nodes_io.py @@ -10,9 +10,9 @@ from typing import Annotated, TypeAlias from uuid import UUID -from common_library.pydantic_type_adapters import AnyUrlLegacyAdapter from models_library.basic_types import ConstrainedStr, KeyIDStr from pydantic import ( + AnyUrl, BaseModel, BeforeValidator, ConfigDict, @@ -123,7 +123,7 @@ class DownloadLink(BaseModel): 
"""I/O port type to hold a generic download link to a file (e.g. S3 pre-signed link, etc)""" download_link: Annotated[ - str, BeforeValidator(lambda x: str(AnyUrlLegacyAdapter.validate_python(x))) + str, BeforeValidator(lambda x: str(TypeAdapter(AnyUrl).validate_python(x))) ] = Field(..., alias="downloadLink") label: str | None = Field(default=None, description="Display name") model_config = ConfigDict( diff --git a/packages/simcore-sdk/tests/unit/test_node_ports_v2_links.py b/packages/simcore-sdk/tests/unit/test_node_ports_v2_links.py index 5116311ae01..95b114ae563 100644 --- a/packages/simcore-sdk/tests/unit/test_node_ports_v2_links.py +++ b/packages/simcore-sdk/tests/unit/test_node_ports_v2_links.py @@ -2,7 +2,7 @@ from uuid import uuid4 import pytest -from pydantic import ValidationError +from pydantic import TypeAdapter, ValidationError from simcore_sdk.node_ports_v2.links import DownloadLink, FileLink, PortLink From 890583b83a4e0095d3fa5413e3deeb91e0dafe92 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 8 Oct 2024 16:47:14 +0200 Subject: [PATCH 261/280] fix type --- .../common-library/src/common_library/pydantic_type_adapters.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/common-library/src/common_library/pydantic_type_adapters.py b/packages/common-library/src/common_library/pydantic_type_adapters.py index b9db9e6186b..883100c4717 100644 --- a/packages/common-library/src/common_library/pydantic_type_adapters.py +++ b/packages/common-library/src/common_library/pydantic_type_adapters.py @@ -5,4 +5,4 @@ AnyUrlLegacyAdapter: Final[TypeAdapter[AnyUrlLegacy]] = TypeAdapter(AnyUrlLegacy) -AnyHttpUrlLegacyAdapter: Final[TypeAdapter] = TypeAdapter(AnyHttpUrlLegacy) +AnyHttpUrlLegacyAdapter: Final[TypeAdapter[AnyHttpUrlLegacy]] = TypeAdapter(AnyHttpUrlLegacy) From ffd0adc5702707842e2ce1c6aec6ea8ac36e016b Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 8 Oct 2024 17:06:01 +0200 Subject: [PATCH 262/280] fix type --- .../src/common_library/pydantic_networks_extension.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/common-library/src/common_library/pydantic_networks_extension.py b/packages/common-library/src/common_library/pydantic_networks_extension.py index d98ea73b53c..77508a1690f 100644 --- a/packages/common-library/src/common_library/pydantic_networks_extension.py +++ b/packages/common-library/src/common_library/pydantic_networks_extension.py @@ -8,20 +8,20 @@ def _strip_last_slash(url: str) -> str: AnyUrlLegacy: TypeAlias = Annotated[ - str, AnyUrl, + AfterValidator(str), AfterValidator(_strip_last_slash), ] AnyHttpUrlLegacy: TypeAlias = Annotated[ - str, AnyHttpUrl, + AfterValidator(str), AfterValidator(_strip_last_slash), ] HttpUrlLegacy: TypeAlias = Annotated[ - str, HttpUrl, + AfterValidator(str), AfterValidator(_strip_last_slash), ] From 7663205ee85ec18044baed3ee3c9e20db39d9707 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 9 Oct 2024 09:32:45 +0200 Subject: [PATCH 263/280] fix networks types --- .../pydantic_networks_extension.py | 8 +++--- .../tests/test_pydantic_networks_extension.py | 27 ++++++++++++++++--- 2 files changed, 26 insertions(+), 9 deletions(-) diff --git a/packages/common-library/src/common_library/pydantic_networks_extension.py b/packages/common-library/src/common_library/pydantic_networks_extension.py index 77508a1690f..79c5da906b1 100644 --- a/packages/common-library/src/common_library/pydantic_networks_extension.py +++ 
b/packages/common-library/src/common_library/pydantic_networks_extension.py @@ -1,27 +1,25 @@ from typing import Annotated, TypeAlias from pydantic import AfterValidator, AnyHttpUrl, AnyUrl, HttpUrl +from pydantic_core import Url -def _strip_last_slash(url: str) -> str: - return url.rstrip("/") +def _strip_last_slash(url: Url) -> str: + return f"{url}".rstrip("/") AnyUrlLegacy: TypeAlias = Annotated[ AnyUrl, - AfterValidator(str), AfterValidator(_strip_last_slash), ] AnyHttpUrlLegacy: TypeAlias = Annotated[ AnyHttpUrl, - AfterValidator(str), AfterValidator(_strip_last_slash), ] HttpUrlLegacy: TypeAlias = Annotated[ HttpUrl, - AfterValidator(str), AfterValidator(_strip_last_slash), ] diff --git a/packages/common-library/tests/test_pydantic_networks_extension.py b/packages/common-library/tests/test_pydantic_networks_extension.py index 3390f7c2acf..6ab50a42a2b 100644 --- a/packages/common-library/tests/test_pydantic_networks_extension.py +++ b/packages/common-library/tests/test_pydantic_networks_extension.py @@ -1,20 +1,39 @@ +import pytest from common_library.pydantic_networks_extension import AnyHttpUrlLegacy -from pydantic import AnyHttpUrl, TypeAdapter +from pydantic import AnyHttpUrl, BaseModel, TypeAdapter, ValidationError from pydantic_core import Url +class A(BaseModel): + url: AnyHttpUrlLegacy + + def test_any_http_url(): url = TypeAdapter(AnyHttpUrl).validate_python( "http://backgroud.testserver.io", ) assert isinstance(url, Url) - assert f"{url}" == "http://backgroud.testserver.io/" # NOTE: trailing '/' added in Pydantic v2 + assert ( + f"{url}" == "http://backgroud.testserver.io/" + ) # trailing slash added (in Pydantic v2) + def test_any_http_url_legacy(): url = TypeAdapter(AnyHttpUrlLegacy).validate_python( - "http://backgroud.testserver.io", + "http://backgroud.testserver.io", ) assert isinstance(url, str) - assert url == "http://backgroud.testserver.io" + assert url == "http://backgroud.testserver.io" # no trailing slash was added + + +def test_valid_any_http_url_legacy_field(): + a = A(url="http://backgroud.testserver.io") # type: ignore + + assert a.url == "http://backgroud.testserver.io" # no trailing slash was added + + +def test_not_valid_any_http_url_legacy_field(): + with pytest.raises(ValidationError): + A(url="htttttp://backgroud.testserver.io") # type: ignore From 74330ed7d15a7070b7f665d95ab43325f244f627 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 9 Oct 2024 09:38:52 +0200 Subject: [PATCH 264/280] update reqs --- .../requirements/requirements.txt | 28 ++- tests/public-api/requirements/_base.txt | 19 +- tests/swarm-deploy/requirements/_test.txt | 170 ++++++++++++++++-- tests/swarm-deploy/requirements/_tools.txt | 10 -- 4 files changed, 194 insertions(+), 33 deletions(-) diff --git a/tests/environment-setup/requirements/requirements.txt b/tests/environment-setup/requirements/requirements.txt index d1641743383..1ae402aba4e 100644 --- a/tests/environment-setup/requirements/requirements.txt +++ b/tests/environment-setup/requirements/requirements.txt @@ -1,5 +1,5 @@ -exceptiongroup==1.2.2 - # via pytest +annotated-types==0.7.0 + # via pydantic iniconfig==2.0.0 # via pytest packaging==24.1 @@ -8,16 +8,20 @@ packaging==24.1 # pytest-sugar pluggy==1.5.0 # via pytest -pydantic==1.10.17 +pydantic==2.9.2 # via + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/settings-library/requirements/_base.in # -c requirements/../../../requirements/constraints.txt # -r requirements/requirements.in +pydantic-core==2.23.4 + # via pydantic pytest==8.3.2 # via # -r requirements/requirements.in @@ -26,9 +30,13 @@ pytest==8.3.2 # pytest-sugar pytest-asyncio==0.23.8 # via + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/requirements.in @@ -40,15 +48,19 @@ pytest-sugar==1.0.0 # via -r requirements/requirements.in pyyaml==6.0.2 # via + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/requirements.in termcolor==2.4.0 # via pytest-sugar -tomli==2.0.1 - # via pytest typing-extensions==4.12.2 - # via pydantic + # via + # pydantic + # pydantic-core diff --git a/tests/public-api/requirements/_base.txt b/tests/public-api/requirements/_base.txt index fe4fab279b4..4735ddc8cac 100644 --- a/tests/public-api/requirements/_base.txt +++ b/tests/public-api/requirements/_base.txt @@ -1,21 +1,23 @@ +annotated-types==0.7.0 + # via pydantic anyio==4.4.0 # via httpx certifi==2024.7.4 # via + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # httpcore # httpx # osparc-client click==8.1.7 # via typer -exceptiongroup==1.2.2 - # via anyio h11==0.14.0 # via httpcore httpcore==1.0.5 # via httpx httpx==0.27.0 # via + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # osparc idna==3.7 @@ -34,14 +36,23 @@ osparc-client==0.6.6 # via osparc packaging==24.1 # via osparc -pydantic==1.10.17 +pydantic==2.9.2 # via + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in + # pydantic-settings +pydantic-core==2.23.4 + # via pydantic +pydantic-settings==2.5.2 + # via -r requirements/../../../packages/settings-library/requirements/_base.in pygments==2.18.0 # via rich python-dateutil==2.9.0.post0 # via osparc-client +python-dotenv==1.0.1 + # via pydantic-settings rich==13.7.1 # via # -r requirements/../../../packages/settings-library/requirements/_base.in @@ -64,8 +75,8 @@ typer==0.12.4 # via -r requirements/../../../packages/settings-library/requirements/_base.in typing-extensions==4.12.2 # via - # anyio # pydantic + # pydantic-core # typer urllib3==2.2.2 # via diff --git a/tests/swarm-deploy/requirements/_test.txt b/tests/swarm-deploy/requirements/_test.txt index 68e7f7e9a96..a85e38aa920 100644 --- a/tests/swarm-deploy/requirements/_test.txt +++ b/tests/swarm-deploy/requirements/_test.txt @@ -23,16 +23,28 @@ aiohappyeyeballs==2.4.0 # via aiohttp aiohttp==3.10.5 # via + # -c 
requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt @@ -49,6 +61,8 @@ alembic==1.13.2 # -r requirements/../../../packages/postgres-database/requirements/_migration.txt # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in # -r requirements/_test.in +annotated-types==0.7.0 + # via pydantic anyio==4.4.0 # via # fast-depends @@ -65,10 +79,8 @@ arrow==1.3.0 # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in async-timeout==4.0.3 # via - # aiohttp # aiopg # asyncpg - # redis asyncpg==0.29.0 # via sqlalchemy attrs==24.2.0 @@ -78,16 +90,28 @@ attrs==24.2.0 # referencing certifi==2024.7.4 # via + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt @@ -110,10 +134,6 @@ docker==7.1.0 # -r requirements/_test.in email-validator==2.2.0 # via pydantic -exceptiongroup==1.2.2 - # via - # anyio - # pytest fast-depends==2.4.8 # via faststream faststream==0.5.18 @@ -152,16 +172,28 @@ jsonschema-specifications==2023.7.1 # via jsonschema mako==1.3.5 # via + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt @@ -181,16 +213,28 @@ multidict==6.0.5 # yarl orjson==3.10.7 # via + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt @@ -213,34 +257,78 @@ psycopg2-binary==2.9.9 # via # aiopg # sqlalchemy -pydantic==1.10.17 +pydantic==2.9.2 # via + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/_base.in # fast-depends + 
# pydantic-extra-types + # pydantic-settings +pydantic-core==2.23.4 + # via pydantic +pydantic-extra-types==2.9.0 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in +pydantic-settings==2.5.2 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/_base.in pygments==2.18.0 # via rich pyinstrument==4.7.2 @@ -256,16 +344,28 @@ pytest==8.3.2 # pytest-sugar pytest-asyncio==0.23.8 # via + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt @@ -281,19 +381,33 @@ pytest-sugar==1.0.0 python-dateutil==2.9.0.post0 # via arrow python-dotenv==1.0.1 - # via -r requirements/_test.in + # via + # -r requirements/_test.in + # pydantic-settings pyyaml==6.0.2 # via + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt @@ -302,16 +416,28 @@ pyyaml==6.0.2 # -r requirements/_test.in redis==5.0.8 # via + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../requirements/constraints.txt @@ -346,16 +472,28 @@ sniffio==1.3.1 # via anyio sqlalchemy==1.4.53 # via + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt @@ -372,8 +510,6 @@ tenacity==9.0.0 # -r requirements/_test.in termcolor==2.4.0 # via pytest-sugar -tomli==2.0.1 - # via pytest toolz==0.12.1 # via # -r requirements/../../../packages/service-library/requirements/_base.in @@ -397,25 +533,37 @@ typing-extensions==4.12.2 # -r requirements/../../../packages/postgres-database/requirements/_migration.txt # aiodebug # alembic - # anyio # faststream # flexcache # flexparser # pint # pydantic + # pydantic-core # typer urllib3==2.2.2 # via + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt diff --git a/tests/swarm-deploy/requirements/_tools.txt b/tests/swarm-deploy/requirements/_tools.txt index 24faa87728a..14b66aa5089 100644 --- a/tests/swarm-deploy/requirements/_tools.txt +++ b/tests/swarm-deploy/requirements/_tools.txt @@ -69,21 +69,11 @@ ruff==0.6.1 # via -r requirements/../../../requirements/devenv.txt setuptools==73.0.1 # via pip-tools -tomli==2.0.1 - # via - # -c requirements/_test.txt - # black - # build - # mypy - # pip-tools - # pylint tomlkit==0.13.2 # via pylint typing-extensions==4.12.2 # via # -c requirements/_test.txt - # astroid - # black # mypy virtualenv==20.26.3 # via pre-commit From 26348d05f3ee4f7c884b13bdf8767d9919b88cf9 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 9 Oct 2024 10:04:00 +0200 Subject: [PATCH 265/280] fix type --- .../models_library/rest_pagination_utils.py | 34 +++++++------------ 1 file changed, 12 insertions(+), 22 deletions(-) diff --git a/packages/models-library/src/models_library/rest_pagination_utils.py b/packages/models-library/src/models_library/rest_pagination_utils.py index c9ae6ed4167..1adfc5625c6 100644 --- a/packages/models-library/src/models_library/rest_pagination_utils.py +++ b/packages/models-library/src/models_library/rest_pagination_utils.py @@ -38,7 +38,9 @@ def _replace_query(url: _URLType, query: dict[str, Any]) -> str: new_url = url.update_query(query) else: new_url = url.replace_query_params(**query) - return f"{new_url}" + + new_url_str = f"{new_url}" + return f"{AnyHttpUrlLegacyAdapter.validate_python(new_url_str)}" class PageDict(TypedDict): @@ -71,33 +73,21 @@ def paginate_data( total=total, count=len(chunk), limit=limit, offset=offset ), _links=PageLinks( - self=( - AnyHttpUrlLegacyAdapter.validate_python( - _replace_query(request_url, {"offset": offset, "limit": limit}), - ) - ), - first=AnyHttpUrlLegacyAdapter.validate_python( - 
_replace_query(request_url, {"offset": 0, "limit": limit}) - ), - prev=AnyHttpUrlLegacyAdapter.validate_python( - _replace_query( - request_url, {"offset": max(offset - limit, 0), "limit": limit} - ), + self=_replace_query(request_url, {"offset": offset, "limit": limit}), + first=_replace_query(request_url, {"offset": 0, "limit": limit}), + prev=_replace_query( + request_url, {"offset": max(offset - limit, 0), "limit": limit} ) if offset > 0 else None, - next=AnyHttpUrlLegacyAdapter.validate_python( - _replace_query( - request_url, - {"offset": min(offset + limit, last_page * limit), "limit": limit}, - ), + next=_replace_query( + request_url, + {"offset": min(offset + limit, last_page * limit), "limit": limit}, ) if offset < (last_page * limit) else None, - last=AnyHttpUrlLegacyAdapter.validate_python( - _replace_query( - request_url, {"offset": last_page * limit, "limit": limit} - ), + last=_replace_query( + request_url, {"offset": last_page * limit, "limit": limit} ), ), data=chunk, From 6eb3bf7ee0f29391f9e07efd28a66af2d351d65e Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 9 Oct 2024 10:06:56 +0200 Subject: [PATCH 266/280] fix type --- .../src/servicelib/fastapi/long_running_tasks/_client.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/packages/service-library/src/servicelib/fastapi/long_running_tasks/_client.py b/packages/service-library/src/servicelib/fastapi/long_running_tasks/_client.py index 204658220a0..36458031ff8 100644 --- a/packages/service-library/src/servicelib/fastapi/long_running_tasks/_client.py +++ b/packages/service-library/src/servicelib/fastapi/long_running_tasks/_client.py @@ -131,9 +131,8 @@ def _client_configuration(self) -> ClientConfiguration: return output def _get_url(self, path: str) -> str: - return AnyHttpUrlLegacyAdapter.validate_python( - f"{self._base_url}{self._client_configuration.router_prefix}{path}", - ) + url = f"{self._base_url}{self._client_configuration.router_prefix}{path}" + return f"{AnyHttpUrlLegacyAdapter.validate_python(url)}" @retry_on_http_errors async def get_task_status( From b3d0076a3e40499eb18e8994d44acf594a3e5481 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 9 Oct 2024 10:38:53 +0200 Subject: [PATCH 267/280] fix url types --- packages/aws-library/src/aws_library/s3/_client.py | 8 ++++---- packages/aws-library/tests/test_s3_client.py | 6 +++--- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/packages/aws-library/src/aws_library/s3/_client.py b/packages/aws-library/src/aws_library/s3/_client.py index 716ee3ca37b..816208c22c8 100644 --- a/packages/aws-library/src/aws_library/s3/_client.py +++ b/packages/aws-library/src/aws_library/s3/_client.py @@ -255,7 +255,7 @@ async def create_single_presigned_download_link( bucket: S3BucketName, object_key: S3ObjectKey, expiration_secs: int, - ) -> AnyUrl: + ) -> str: # NOTE: ensure the bucket/object exists, this will raise if not await self._client.head_bucket(Bucket=bucket) await self._client.head_object(Bucket=bucket, Key=object_key) @@ -264,12 +264,12 @@ async def create_single_presigned_download_link( Params={"Bucket": bucket, "Key": object_key}, ExpiresIn=expiration_secs, ) - return AnyUrlLegacyAdapter.validate_python(generated_link) + return f"{AnyUrlLegacyAdapter.validate_python(generated_link)}" @s3_exception_handler(_logger) async def create_single_presigned_upload_link( self, *, bucket: S3BucketName, object_key: S3ObjectKey, expiration_secs: int - ) -> AnyUrl: + ) -> str: # NOTE: ensure the bucket/object exists, this will 
raise if not await self._client.head_bucket(Bucket=bucket) generated_link = await self._client.generate_presigned_url( @@ -277,7 +277,7 @@ async def create_single_presigned_upload_link( Params={"Bucket": bucket, "Key": object_key}, ExpiresIn=expiration_secs, ) - return AnyUrlLegacyAdapter.validate_python(generated_link) + return f"{AnyUrlLegacyAdapter.validate_python(generated_link)}" @s3_exception_handler(_logger) async def create_multipart_upload_links( diff --git a/packages/aws-library/tests/test_s3_client.py b/packages/aws-library/tests/test_s3_client.py index e6313c50e9b..af98fd6523f 100644 --- a/packages/aws-library/tests/test_s3_client.py +++ b/packages/aws-library/tests/test_s3_client.py @@ -694,11 +694,11 @@ async def test_create_single_presigned_download_link( object_key=with_uploaded_file_on_s3.s3_key, expiration_secs=default_expiration_time_seconds, ) - assert isinstance(download_url, AnyUrl) + assert download_url dest_file = tmp_path / faker.file_name() async with ClientSession() as session: - response = await session.get(str(download_url)) + response = await session.get(download_url) response.raise_for_status() with dest_file.open("wb") as fp: fp.write(await response.read()) @@ -744,7 +744,7 @@ async def test_create_single_presigned_upload_link( create_file_of_size: Callable[[ByteSize], Path], default_expiration_time_seconds: int, upload_to_presigned_link: Callable[ - [Path, AnyUrl, S3BucketName, S3ObjectKey], Awaitable[None] + [Path, str, S3BucketName, S3ObjectKey], Awaitable[None] ], ): file = create_file_of_size(_BYTE_SIZE_ADAPTER.validate_python("1Mib")) From 4606e1fa4be1c2290af27852f984b933cbd4c312 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 9 Oct 2024 10:47:26 +0200 Subject: [PATCH 268/280] add type hints --- .../common-library/tests/test_pydantic_fields_extension.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/common-library/tests/test_pydantic_fields_extension.py b/packages/common-library/tests/test_pydantic_fields_extension.py index 50ff5443c41..9f5aa1ae2fc 100644 --- a/packages/common-library/tests/test_pydantic_fields_extension.py +++ b/packages/common-library/tests/test_pydantic_fields_extension.py @@ -1,4 +1,4 @@ -from typing import Literal +from typing import Any, Callable, Literal import pytest from common_library.pydantic_fields_extension import get_type, is_literal, is_nullable @@ -68,5 +68,5 @@ class MyModel(BaseModel): (is_nullable, False, "e"), ], ) -def test_field_fn(fn, expected, name): +def test_field_fn(fn: Callable[[Any], Any], expected: Any, name: str): assert expected == fn(MyModel.model_fields[name]) From a37efc1047ed9ea5c7e86557e34b391e11b20db7 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 9 Oct 2024 10:48:43 +0200 Subject: [PATCH 269/280] fix minor --- packages/common-library/tests/test_serialization.py | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/packages/common-library/tests/test_serialization.py b/packages/common-library/tests/test_serialization.py index d53db58809c..d897ff5ec5d 100644 --- a/packages/common-library/tests/test_serialization.py +++ b/packages/common-library/tests/test_serialization.py @@ -1,5 +1,3 @@ -from typing import Final - import pytest from common_library.serialization import model_dump_with_secrets from pydantic import BaseModel, SecretStr @@ -10,9 +8,6 @@ class Credentials(BaseModel): PASSWORD: SecretStr | None = None -ME: Final[Credentials] = Credentials(USERNAME="DeepThought", PASSWORD=SecretStr("42")) - - @pytest.mark.parametrize( 
"expected,show_secrets", [ @@ -27,4 +22,4 @@ class Credentials(BaseModel): ], ) def test_model_dump_with_secrets(expected: dict, show_secrets: bool): - assert expected == model_dump_with_secrets(ME, show_secrets=show_secrets) + assert expected == model_dump_with_secrets(Credentials(USERNAME="DeepThought", PASSWORD=SecretStr("42")), show_secrets=show_secrets) From 638a0be950e47da6bcfffc46d194f403844ae09d Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 9 Oct 2024 11:12:59 +0200 Subject: [PATCH 270/280] add validation --- .../src/models_library/service_settings_nat_rule.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/models-library/src/models_library/service_settings_nat_rule.py b/packages/models-library/src/models_library/service_settings_nat_rule.py index 3e193397821..1f50b62f503 100644 --- a/packages/models-library/src/models_library/service_settings_nat_rule.py +++ b/packages/models-library/src/models_library/service_settings_nat_rule.py @@ -1,14 +1,14 @@ from collections.abc import Generator from typing import Final -from pydantic import BaseModel, ConfigDict, Field, ValidationInfo, field_validator +from pydantic import BaseModel, ConfigDict, Field, TypeAdapter, ValidationInfo, field_validator from .basic_types import PortInt from .osparc_variable_identifier import OsparcVariableIdentifier, raise_if_unresolved # Cloudflare DNS server address DEFAULT_DNS_SERVER_ADDRESS: Final[str] = "1.1.1.1" # NOSONAR -DEFAULT_DNS_SERVER_PORT: Final[PortInt] = 53 +DEFAULT_DNS_SERVER_PORT: Final[PortInt] = TypeAdapter(PortInt).validate_python(53) class _PortRange(BaseModel): From 8540a811e11175224851880e298cd478d6e64517 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 9 Oct 2024 11:28:24 +0200 Subject: [PATCH 271/280] upgrade web server --- .../announcements/_models.py | 12 ++-- .../application_settings.py | 43 +++++++-------- .../simcore_service_webserver/catalog/_api.py | 6 +- .../catalog/_handlers.py | 23 ++++---- .../diagnostics/settings.py | 6 +- .../director_v2/_models.py | 9 +-- .../folders/_folders_handlers.py | 20 +++---- .../groups/_classifiers.py | 20 +++---- .../groups/_handlers.py | 32 +++++------ .../login/_auth_handlers.py | 2 +- .../login/_models.py | 7 +-- .../login/_registration.py | 10 ++-- .../login/handlers_registration.py | 16 +++--- .../login/settings.py | 6 +- .../meta_modeling/_handlers.py | 4 +- .../meta_modeling/_results.py | 55 +++++++++---------- .../payments/_autorecharge_db.py | 8 +-- .../payments/_methods_db.py | 15 ++--- .../payments/_onetime_db.py | 6 +- .../payments/settings.py | 10 ++-- .../products/_model.py | 28 +++++----- .../projects/_comments_handlers.py | 18 ++---- .../projects/_common_models.py | 7 +-- .../projects/_crud_handlers_models.py | 35 ++++++------ .../projects/_folders_handlers.py | 10 ++-- .../projects/_groups_handlers.py | 10 +--- .../projects/_nodes_api.py | 12 ++-- .../projects/_ports_api.py | 7 ++- .../_projects_nodes_pricing_unit_handlers.py | 6 +- .../projects/_wallets_handlers.py | 6 +- .../projects/_workspaces_handlers.py | 10 ++-- .../projects/models.py | 20 +++---- .../_pricing_plans_admin_handlers.py | 14 ++--- .../resource_usage/_pricing_plans_handlers.py | 8 +-- .../resource_usage/_service_runs_handlers.py | 24 +++----- .../scicrunch/models.py | 14 ++--- .../session/settings.py | 9 ++- .../socketio/models.py | 10 ++-- .../statics/settings.py | 8 +-- .../storage/schemas.py | 36 ++++++------ .../studies_dispatcher/_redirects_handlers.py | 21 ++++--- .../studies_dispatcher/_rest_handlers.py 
| 27 ++++----- .../studies_dispatcher/settings.py | 24 ++++---- .../users/_notifications.py | 12 ++-- .../users/_schemas.py | 24 ++++---- .../users/schemas.py | 29 +++++----- .../utils_aiohttp.py | 3 +- .../version_control/_handlers.py | 4 +- .../version_control/models.py | 8 +-- .../wallets/_groups_handlers.py | 14 ++--- .../workspaces/_groups_db.py | 8 +-- .../workspaces/_groups_handlers.py | 14 ++--- .../workspaces/_workspaces_handlers.py | 16 +++--- .../isolated/test_studies_dispatcher_core.py | 4 +- .../unit/isolated/test_utils_rate_limiting.py | 5 +- .../version_control/test_version_control.py | 8 +-- 56 files changed, 383 insertions(+), 440 deletions(-) diff --git a/services/web/server/src/simcore_service_webserver/announcements/_models.py b/services/web/server/src/simcore_service_webserver/announcements/_models.py index 4edb7c8d20a..590958d39e1 100644 --- a/services/web/server/src/simcore_service_webserver/announcements/_models.py +++ b/services/web/server/src/simcore_service_webserver/announcements/_models.py @@ -1,8 +1,8 @@ from datetime import datetime -from typing import Any, ClassVar, Literal +from typing import Literal import arrow -from pydantic import BaseModel, validator +from pydantic import ConfigDict, BaseModel, field_validator # NOTE: this model is used for BOTH @@ -18,7 +18,7 @@ class Announcement(BaseModel): link: str widgets: list[Literal["login", "ribbon", "user-menu"]] - @validator("end") + @field_validator("end") @classmethod def check_start_before_end(cls, v, values): if start := values.get("start"): @@ -30,9 +30,8 @@ def check_start_before_end(cls, v, values): def expired(self) -> bool: return self.end <= arrow.utcnow().datetime - - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ { "id": "Student_Competition_2023", @@ -56,3 +55,4 @@ class Config: }, ] } + ) diff --git a/services/web/server/src/simcore_service_webserver/application_settings.py b/services/web/server/src/simcore_service_webserver/application_settings.py index fcdec0f9eb3..3ed7074d16e 100644 --- a/services/web/server/src/simcore_service_webserver/application_settings.py +++ b/services/web/server/src/simcore_service_webserver/application_settings.py @@ -11,7 +11,7 @@ VersionTag, ) from models_library.utils.change_case import snake_to_camel -from pydantic import AnyHttpUrl, parse_obj_as, root_validator, validator +from pydantic import AliasChoices, TypeAdapter, field_validator, model_validator, AnyHttpUrl from pydantic.fields import Field, ModelField from pydantic.types import PositiveInt from settings_library.base import BaseCustomSettings @@ -53,7 +53,7 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): # CODE STATICS --------------------------------------------------------- API_VERSION: str = API_VERSION APP_NAME: str = APP_NAME - API_VTAG: VersionTag = parse_obj_as(VersionTag, API_VTAG) + API_VTAG: VersionTag = TypeAdapter(VersionTag).validate_python(API_VTAG) # IMAGE BUILDTIME ------------------------------------------------------ # @Makefile @@ -119,52 +119,52 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): description="host name to serve within the container." 
"NOTE that this different from WEBSERVER_HOST env which is the host seen outside the container", ) - WEBSERVER_HOST: str | None = Field(None, env=["WEBSERVER_HOST", "HOST", "HOSTNAME"]) - WEBSERVER_PORT: PortInt = parse_obj_as(PortInt, DEFAULT_AIOHTTP_PORT) + WEBSERVER_HOST: str | None = Field(None, validation_alias=AliasChoices("WEBSERVER_HOST", "HOST", "HOSTNAME")) + WEBSERVER_PORT: PortInt = TypeAdapter(PortInt).validate_python(DEFAULT_AIOHTTP_PORT) WEBSERVER_FRONTEND: FrontEndAppSettings | None = Field( - auto_default_from_env=True, description="front-end static settings" + json_schema_extra={"auto_default_from_env": True}, description="front-end static settings" ) # PLUGINS ---------------- WEBSERVER_ACTIVITY: PrometheusSettings | None = Field( - auto_default_from_env=True, + json_schema_extra={"auto_default_from_env": True}, description="activity plugin", ) WEBSERVER_CATALOG: CatalogSettings | None = Field( - auto_default_from_env=True, description="catalog service client's plugin" + json_schema_extra={"auto_default_from_env": True}, description="catalog service client's plugin" ) # TODO: Shall be required WEBSERVER_DB: PostgresSettings | None = Field( - auto_default_from_env=True, description="database plugin" + json_schema_extra={"auto_default_from_env": True}, description="database plugin" ) WEBSERVER_DIAGNOSTICS: DiagnosticsSettings | None = Field( - auto_default_from_env=True, description="diagnostics plugin" + json_schema_extra={"auto_default_from_env": True}, description="diagnostics plugin" ) WEBSERVER_DIRECTOR_V2: DirectorV2Settings | None = Field( - auto_default_from_env=True, description="director-v2 service client's plugin" + json_schema_extra={"auto_default_from_env": True}, description="director-v2 service client's plugin" ) WEBSERVER_EMAIL: SMTPSettings | None = Field( - auto_default_from_env=True, description="email plugin" + json_schema_extra={"auto_default_from_env": True}, description="email plugin" ) WEBSERVER_EXPORTER: ExporterSettings | None = Field( - auto_default_from_env=True, description="exporter plugin" + json_schema_extra={"auto_default_from_env": True}, description="exporter plugin" ) WEBSERVER_GARBAGE_COLLECTOR: GarbageCollectorSettings | None = Field( - auto_default_from_env=True, description="garbage collector plugin" + json_schema_extra={"auto_default_from_env": True}, description="garbage collector plugin" ) WEBSERVER_INVITATIONS: InvitationsSettings | None = Field( - auto_default_from_env=True, description="invitations plugin" + json_schema_extra={"auto_default_from_env": True}, description="invitations plugin" ) WEBSERVER_LOGIN: LoginSettings | None = Field( - auto_default_from_env=True, description="login plugin" + json_schema_extra={"auto_default_from_env": True}, description="login plugin" ) WEBSERVER_PAYMENTS: PaymentsSettings | None = Field( - auto_default_from_env=True, description="payments plugin settings" + json_schema_extra={"auto_default_from_env": True}, description="payments plugin settings" ) WEBSERVER_DYNAMIC_SCHEDULER: DynamicSchedulerSettings | None = Field( @@ -241,7 +241,7 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): "Currently this is a system plugin and cannot be disabled", ) - @root_validator() + @model_validator(mode="after") @classmethod def build_vcs_release_url_if_unset(cls, values): release_url = values.get("SIMCORE_VCS_RELEASE_URL") @@ -259,14 +259,13 @@ def build_vcs_release_url_if_unset(cls, values): return values - @validator( + @field_validator( # List of plugins under-development (keep 
up-to-date) # TODO: consider mark as dev-feature in field extras of Config attr. # Then they can be automtically advertised "WEBSERVER_META_MODELING", "WEBSERVER_VERSION_CONTROL", - pre=True, - always=True, + mode="before" ) @classmethod def enable_only_if_dev_features_allowed(cls, v, values, field: ModelField): @@ -286,12 +285,12 @@ def log_level(self) -> int: level: int = getattr(logging, self.WEBSERVER_LOGLEVEL.upper()) return level - @validator("WEBSERVER_LOGLEVEL") + @field_validator("WEBSERVER_LOGLEVEL") @classmethod def valid_log_level(cls, value): return cls.validate_log_level(value) - @validator("SC_HEALTHCHECK_TIMEOUT", pre=True) + @field_validator("SC_HEALTHCHECK_TIMEOUT", mode="before") @classmethod def get_healthcheck_timeout_in_seconds(cls, v): # Ex. HEALTHCHECK --interval=5m --timeout=3s diff --git a/services/web/server/src/simcore_service_webserver/catalog/_api.py b/services/web/server/src/simcore_service_webserver/catalog/_api.py index 391652ba4ef..7be53a81e4d 100644 --- a/services/web/server/src/simcore_service_webserver/catalog/_api.py +++ b/services/web/server/src/simcore_service_webserver/catalog/_api.py @@ -23,7 +23,7 @@ from models_library.users import UserID from models_library.utils.fastapi_encoders import jsonable_encoder from pint import UnitRegistry -from pydantic import BaseModel +from pydantic import ConfigDict, BaseModel from servicelib.aiohttp.requests_validation import handle_validation_as_http_error from servicelib.rabbitmq.rpc_interfaces.catalog import services as catalog_rpc from servicelib.rest_constants import RESPONSE_MODEL_POLICY @@ -42,9 +42,7 @@ class CatalogRequestContext(BaseModel): user_id: UserID product_name: str unit_registry: UnitRegistry - - class Config: - arbitrary_types_allowed = True + model_config = ConfigDict(arbitrary_types_allowed=True) @classmethod def create(cls, request: Request) -> "CatalogRequestContext": diff --git a/services/web/server/src/simcore_service_webserver/catalog/_handlers.py b/services/web/server/src/simcore_service_webserver/catalog/_handlers.py index 9dbc05d20fb..3106eaa1a53 100644 --- a/services/web/server/src/simcore_service_webserver/catalog/_handlers.py +++ b/services/web/server/src/simcore_service_webserver/catalog/_handlers.py @@ -26,7 +26,7 @@ ServiceResourcesDict, ServiceResourcesDictHelpers, ) -from pydantic import BaseModel, Extra, Field, parse_obj_as, validator +from pydantic import TypeAdapter, field_validator, ConfigDict, BaseModel, Field from servicelib.aiohttp.requests_validation import ( parse_request_body_as, parse_request_path_parameters_as, @@ -54,12 +54,9 @@ class ServicePathParams(BaseModel): service_key: ServiceKey service_version: ServiceVersion + model_config = ConfigDict(populate_by_name=True, extra="forbid") - class Config: - allow_population_by_field_name = True - extra = Extra.forbid - - @validator("service_key", pre=True) + @field_validator("service_key", mode="before") @classmethod def ensure_unquoted(cls, v): # NOTE: this is needed as in pytest mode, the aiohttp server does not seem to unquote automatically @@ -90,7 +87,7 @@ async def list_services_latest(request: Request): user_id=request_ctx.user_id, product_name=request_ctx.product_name, unit_registry=request_ctx.unit_registry, - page_params=PageQueryParameters.construct( + page_params=PageQueryParameters.model_construct( offset=query_params.offset, limit=query_params.limit ), ) @@ -160,7 +157,7 @@ async def update_service(request: Request): product_name=request_ctx.product_name, service_key=path_params.service_key, 
service_version=path_params.service_version, - update_data=update.dict(exclude_unset=True), + update_data=update.model_dump(exclude_unset=True), unit_registry=request_ctx.unit_registry, ) @@ -182,7 +179,7 @@ async def list_service_inputs(request: Request): path_params.service_key, path_params.service_version, ctx ) - data = [m.dict(**RESPONSE_MODEL_POLICY) for m in response_model] + data = [m.model_dump(**RESPONSE_MODEL_POLICY) for m in response_model] return await asyncio.get_event_loop().run_in_executor( None, envelope_json_response, data ) @@ -210,7 +207,7 @@ async def get_service_input(request: Request): ctx, ) - data = response_model.dict(**RESPONSE_MODEL_POLICY) + data = response_model.model_dump(**RESPONSE_MODEL_POLICY) return await asyncio.get_event_loop().run_in_executor( None, envelope_json_response, data ) @@ -265,7 +262,7 @@ async def list_service_outputs(request: Request): path_params.service_key, path_params.service_version, ctx ) - data = [m.dict(**RESPONSE_MODEL_POLICY) for m in response_model] + data = [m.model_dump(**RESPONSE_MODEL_POLICY) for m in response_model] return await asyncio.get_event_loop().run_in_executor( None, envelope_json_response, data ) @@ -293,7 +290,7 @@ async def get_service_output(request: Request): ctx, ) - data = response_model.dict(**RESPONSE_MODEL_POLICY) + data = response_model.model_dump(**RESPONSE_MODEL_POLICY) return await asyncio.get_event_loop().run_in_executor( None, envelope_json_response, data ) @@ -386,4 +383,4 @@ async def get_service_pricing_plan(request: Request): service_version=f"{path_params.service_version}", ) - return envelope_json_response(parse_obj_as(PricingPlanGet, pricing_plan)) + return envelope_json_response(TypeAdapter(PricingPlanGet).validate_python(pricing_plan)) diff --git a/services/web/server/src/simcore_service_webserver/diagnostics/settings.py b/services/web/server/src/simcore_service_webserver/diagnostics/settings.py index 777a1ac9457..4bd1133b6e7 100644 --- a/services/web/server/src/simcore_service_webserver/diagnostics/settings.py +++ b/services/web/server/src/simcore_service_webserver/diagnostics/settings.py @@ -1,5 +1,5 @@ from aiohttp.web import Application -from pydantic import Field, NonNegativeFloat, PositiveFloat, validator +from pydantic import AliasChoices, Field, NonNegativeFloat, PositiveFloat, field_validator from servicelib.aiohttp.application_keys import APP_SETTINGS_KEY from settings_library.base import BaseCustomSettings @@ -11,7 +11,7 @@ class DiagnosticsSettings(BaseCustomSettings): "Any task blocked more than slow_duration_secs is logged as WARNING" "Aims to identify possible blocking calls" ), - env=["DIAGNOSTICS_SLOW_DURATION_SECS", "AIODEBUG_SLOW_DURATION_SECS"], + validation_alias=AliasChoices("DIAGNOSTICS_SLOW_DURATION_SECS", "AIODEBUG_SLOW_DURATION_SECS"), ) DIAGNOSTICS_MAX_TASK_DELAY: PositiveFloat = Field( @@ -25,7 +25,7 @@ class DiagnosticsSettings(BaseCustomSettings): DIAGNOSTICS_START_SENSING_DELAY: NonNegativeFloat = 60.0 - @validator("DIAGNOSTICS_MAX_TASK_DELAY", pre=True) + @field_validator("DIAGNOSTICS_MAX_TASK_DELAY", mode="before") @classmethod def validate_max_task_delay(cls, v, values): # Sets an upper threshold for blocking functions, i.e. 
diff --git a/services/web/server/src/simcore_service_webserver/director_v2/_models.py b/services/web/server/src/simcore_service_webserver/director_v2/_models.py index 70dd53ff5fd..8d2c5fbf42c 100644 --- a/services/web/server/src/simcore_service_webserver/director_v2/_models.py +++ b/services/web/server/src/simcore_service_webserver/director_v2/_models.py @@ -10,7 +10,7 @@ ExternalClusterAuthentication, ) from models_library.users import GroupID -from pydantic import AnyHttpUrl, BaseModel, Field, validator +from pydantic import AnyHttpUrl, BaseModel, ConfigDict, Field, field_validator from pydantic.networks import AnyUrl, HttpUrl from simcore_postgres_database.models.clusters import ClusterType @@ -33,7 +33,7 @@ class ClusterCreate(BaseCluster): alias="accessRights", default_factory=dict ) - @validator("thumbnail", always=True, pre=True) + @field_validator("thumbnail", mode="before") @classmethod def set_default_thumbnail_if_empty(cls, v, values): if v is None and ( @@ -42,8 +42,8 @@ def set_default_thumbnail_if_empty(cls, v, values): return _DEFAULT_THUMBNAILS[f"{cluster_type}"] return v - class Config(BaseCluster.Config): - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ { "name": "My awesome cluster", @@ -74,6 +74,7 @@ class Config(BaseCluster.Config): }, ] } + ) class ClusterPatch(BaseCluster): diff --git a/services/web/server/src/simcore_service_webserver/folders/_folders_handlers.py b/services/web/server/src/simcore_service_webserver/folders/_folders_handlers.py index 68f2788c850..a0e5fbcbd28 100644 --- a/services/web/server/src/simcore_service_webserver/folders/_folders_handlers.py +++ b/services/web/server/src/simcore_service_webserver/folders/_folders_handlers.py @@ -16,7 +16,7 @@ from models_library.users import UserID from models_library.utils.common_validators import null_or_none_str_to_none_validator from models_library.workspaces import WorkspaceID -from pydantic import Extra, Field, Json, parse_obj_as, validator +from pydantic import TypeAdapter, field_validator, ConfigDict, Field, Json from servicelib.aiohttp.requests_validation import ( RequestParams, StrictRequestParams, @@ -89,7 +89,7 @@ class FolderListWithJsonStrQueryParams(PageQueryParameters): order_by: Json[OrderBy] = Field( default=OrderBy(field=IDStr("modified"), direction=OrderDirection.DESC), description="Order by field (modified_at|name|description) and direction (asc|desc). The default sorting order is ascending.", - example='{"field": "name", "direction": "desc"}', + examples=['{"field": "name", "direction": "desc"}'], alias="order_by", ) folder_id: FolderID | None = Field( @@ -101,7 +101,7 @@ class FolderListWithJsonStrQueryParams(PageQueryParameters): description="List folders in specific workspace. 
By default, list in the user private workspace", ) - @validator("order_by", check_fields=False) + @field_validator("order_by", check_fields=False) @classmethod def validate_order_by_field(cls, v): if v.field not in { @@ -114,17 +114,15 @@ def validate_order_by_field(cls, v): if v.field == "modified_at": v.field = "modified" return v - - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") # validators - _null_or_none_str_to_none_validator = validator( - "folder_id", allow_reuse=True, pre=True + _null_or_none_str_to_none_validator = field_validator( + "folder_id", mode="before" )(null_or_none_str_to_none_validator) - _null_or_none_str_to_none_validator2 = validator( - "workspace_id", allow_reuse=True, pre=True + _null_or_none_str_to_none_validator2 = field_validator( + "workspace_id", mode="before" )(null_or_none_str_to_none_validator) @@ -166,7 +164,7 @@ async def list_folders(request: web.Request): workspace_id=query_params.workspace_id, offset=query_params.offset, limit=query_params.limit, - order_by=parse_obj_as(OrderBy, query_params.order_by), + order_by=TypeAdapter(OrderBy).validate_python(query_params.order_by), ) page = Page[FolderGet].model_validate( diff --git a/services/web/server/src/simcore_service_webserver/groups/_classifiers.py b/services/web/server/src/simcore_service_webserver/groups/_classifiers.py index 5ac89e0ee94..a8e666357cf 100644 --- a/services/web/server/src/simcore_service_webserver/groups/_classifiers.py +++ b/services/web/server/src/simcore_service_webserver/groups/_classifiers.py @@ -10,19 +10,16 @@ import logging import re -from typing import Any, Final, Literal +from typing import Annotated, Any, Final, Literal, TypeAlias import sqlalchemy as sa from aiohttp import web from aiopg.sa.result import RowProxy from pydantic import ( - BaseModel, - ConstrainedStr, + StringConstraints, TypeAdapter, field_validator, BaseModel, Field, HttpUrl, ValidationError, - parse_obj_as, - validator, ) from simcore_postgres_database.models.classifiers import group_classifiers @@ -37,8 +34,7 @@ # DOMAIN MODELS --- -class TreePath(ConstrainedStr): - regex = re.compile(r"[\w:]+") # Examples 'a::b::c +TreePath: TypeAlias = Annotated[str, StringConstraints(pattern=r"[\w:]+")] # Examples 'a::b::c class ClassifierItem(BaseModel): @@ -50,10 +46,10 @@ class ClassifierItem(BaseModel): url: HttpUrl | None = Field( None, description="Link to more information", - example="https://scicrunch.org/resources/Any/search?q=osparc&l=osparc", + examples=["https://scicrunch.org/resources/Any/search?q=osparc&l=osparc"], ) - @validator("short_description", pre=True) + @field_validator("short_description", mode="before") @classmethod def truncate_to_short(cls, v): if v and len(v) >= MAX_SIZE_SHORT_MSG: @@ -91,7 +87,7 @@ async def get_classifiers_from_bundle(self, gid: int) -> dict[str, Any]: if bundle: try: # truncate bundle to what is needed and drop the rest - return Classifiers(**bundle).dict(exclude_unset=True, exclude_none=True) + return Classifiers(**bundle).model_dump(exclude_unset=True, exclude_none=True) except ValidationError as err: _logger.error( "DB corrupt data in 'groups_classifiers' table. 
" @@ -136,7 +132,7 @@ async def build_rrids_tree_view( url=scicrunch.get_resolver_web_url(resource.rrid), ) - node = parse_obj_as(TreePath, validated_item.display_name.replace(":", " ")) + node = TypeAdapter(TreePath).validate_python(validated_item.display_name.replace(":", " ")) flat_tree_view[node] = validated_item except ValidationError as err: @@ -144,4 +140,4 @@ async def build_rrids_tree_view( "Cannot convert RRID into a classifier item. Skipping. Details: %s", err ) - return Classifiers.construct(classifiers=flat_tree_view).dict(exclude_unset=True) + return Classifiers.model_construct(classifiers=flat_tree_view).model_dump(exclude_unset=True) diff --git a/services/web/server/src/simcore_service_webserver/groups/_handlers.py b/services/web/server/src/simcore_service_webserver/groups/_handlers.py index 47d7ab89b53..ed343c050a8 100644 --- a/services/web/server/src/simcore_service_webserver/groups/_handlers.py +++ b/services/web/server/src/simcore_service_webserver/groups/_handlers.py @@ -12,7 +12,7 @@ from models_library.emails import LowerCaseEmailStr from models_library.users import GroupID, UserID from models_library.utils.json_serialization import json_dumps -from pydantic import BaseModel, Extra, Field, parse_obj_as +from pydantic import ConfigDict, BaseModel, Field, TypeAdapter from servicelib.aiohttp.requests_validation import ( parse_request_path_parameters_as, parse_request_query_parameters_as, @@ -103,15 +103,13 @@ async def list_groups(request: web.Request): product_gid=product.group_id, ) - assert parse_obj_as(AllUsersGroups, result) is not None # nosec + assert TypeAdapter(AllUsersGroups).validate_python(result) is not None # nosec return result class _GroupPathParams(BaseModel): gid: GroupID - - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") @routes.get(f"/{API_VTAG}/groups/{{gid}}", name="get_group") @@ -124,7 +122,7 @@ async def get_group(request: web.Request): path_params = parse_request_path_parameters_as(_GroupPathParams, request) group = await api.get_user_group(request.app, req_ctx.user_id, path_params.gid) - assert parse_obj_as(UsersGroup, group) is not None # nosec + assert TypeAdapter(UsersGroup).validate_python(group) is not None # nosec return group @@ -138,7 +136,7 @@ async def create_group(request: web.Request): new_group = await request.json() created_group = await api.create_user_group(request.app, req_ctx.user_id, new_group) - assert parse_obj_as(UsersGroup, created_group) is not None # nosec + assert TypeAdapter(UsersGroup).validate_python(created_group) is not None # nosec raise web.HTTPCreated( text=json_dumps({"data": created_group}), content_type=MIMETYPE_APPLICATION_JSON ) @@ -156,7 +154,7 @@ async def update_group(request: web.Request): updated_group = await api.update_user_group( request.app, req_ctx.user_id, path_params.gid, new_group_values ) - assert parse_obj_as(UsersGroup, updated_group) is not None # nosec + assert TypeAdapter(UsersGroup).validate_python(updated_group) is not None # nosec return envelope_json_response(updated_group) @@ -183,7 +181,7 @@ async def get_group_users(request: web.Request): group_user = await api.list_users_in_group( request.app, req_ctx.user_id, path_params.gid ) - assert parse_obj_as(list[GroupUserGet], group_user) is not None # nosec + assert TypeAdapter(list[GroupUserGet]).validate_python(group_user) is not None # nosec return envelope_json_response(group_user) @@ -203,7 +201,7 @@ async def add_group_user(request: web.Request): new_user_id = new_user_in_group["uid"] if "uid" in 
new_user_in_group else None new_user_email = ( - parse_obj_as(LowerCaseEmailStr, new_user_in_group["email"]) + TypeAdapter(LowerCaseEmailStr).validate_python(new_user_in_group["email"]) if "email" in new_user_in_group else None ) @@ -221,9 +219,7 @@ async def add_group_user(request: web.Request): class _GroupUserPathParams(BaseModel): gid: GroupID uid: UserID - - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") @routes.get(f"/{API_VTAG}/groups/{{gid}}/users/{{uid}}", name="get_group_user") @@ -239,7 +235,7 @@ async def get_group_user(request: web.Request): user = await api.get_user_in_group( request.app, req_ctx.user_id, path_params.gid, path_params.uid ) - assert parse_obj_as(GroupUserGet, user) is not None # nosec + assert TypeAdapter(GroupUserGet).validate_python(user) is not None # nosec return envelope_json_response(user) @@ -261,7 +257,7 @@ async def update_group_user(request: web.Request): path_params.uid, new_values_for_user_in_group, ) - assert parse_obj_as(GroupUserGet, user) is not None # nosec + assert TypeAdapter(GroupUserGet).validate_python(user) is not None # nosec return envelope_json_response(user) @@ -351,7 +347,7 @@ async def get_scicrunch_resource(request: web.Request): scicrunch = SciCrunch.get_instance(request.app) resource = await scicrunch.get_resource_fields(rrid) - return envelope_json_response(resource.dict()) + return envelope_json_response(resource.model_dump()) @routes.post( @@ -375,7 +371,7 @@ async def add_scicrunch_resource(request: web.Request): # insert new or if exists, then update await repo.upsert(resource) - return envelope_json_response(resource.dict()) + return envelope_json_response(resource.model_dump()) @routes.get( @@ -391,4 +387,4 @@ async def search_scicrunch_resources(request: web.Request): scicrunch = SciCrunch.get_instance(request.app) hits: list[ResourceHit] = await scicrunch.search_resource(guess_name) - return envelope_json_response([hit.dict() for hit in hits]) + return envelope_json_response([hit.model_dump() for hit in hits]) diff --git a/services/web/server/src/simcore_service_webserver/login/_auth_handlers.py b/services/web/server/src/simcore_service_webserver/login/_auth_handlers.py index a5a69504e99..953f3730a7c 100644 --- a/services/web/server/src/simcore_service_webserver/login/_auth_handlers.py +++ b/services/web/server/src/simcore_service_webserver/login/_auth_handlers.py @@ -275,7 +275,7 @@ async def login_2fa(request: web.Request): class LogoutBody(InputSchema): client_session_id: str | None = Field( - None, example="5ac57685-c40f-448f-8711-70be1936fd63" + None, examples=["5ac57685-c40f-448f-8711-70be1936fd63"] ) diff --git a/services/web/server/src/simcore_service_webserver/login/_models.py b/services/web/server/src/simcore_service_webserver/login/_models.py index 2ac7b94f11a..6787a207aac 100644 --- a/services/web/server/src/simcore_service_webserver/login/_models.py +++ b/services/web/server/src/simcore_service_webserver/login/_models.py @@ -1,15 +1,12 @@ from typing import Any, Callable -from pydantic import BaseModel, Extra, SecretStr +from pydantic import ConfigDict, BaseModel, SecretStr from ._constants import MSG_PASSWORD_MISMATCH class InputSchema(BaseModel): - class Config: - allow_population_by_field_name = False - extra = Extra.forbid - allow_mutations = False + model_config = ConfigDict(populate_by_name=False, extra="forbid", frozen=True) def create_password_match_validator( diff --git a/services/web/server/src/simcore_service_webserver/login/_registration.py 
b/services/web/server/src/simcore_service_webserver/login/_registration.py index 10089caae6f..436b3c863a7 100644 --- a/services/web/server/src/simcore_service_webserver/login/_registration.py +++ b/services/web/server/src/simcore_service_webserver/login/_registration.py @@ -14,13 +14,11 @@ from models_library.emails import LowerCaseEmailStr from models_library.products import ProductName from pydantic import ( - BaseModel, + TypeAdapter, field_validator, BaseModel, Field, Json, PositiveInt, ValidationError, - parse_obj_as, - validator, ) from servicelib.mimetype_constants import MIMETYPE_APPLICATION_JSON from simcore_postgres_database.models.confirmations import ConfirmationAction @@ -76,7 +74,7 @@ class _InvitationValidator(BaseModel): action: ConfirmationAction data: Json[InvitationData] # pylint: disable=unsubscriptable-object - @validator("action", pre=True) + @field_validator("action", mode="before") @classmethod def ensure_enum(cls, v): if isinstance(v, ConfirmationAction): @@ -190,7 +188,7 @@ async def create_invitation_token( return await db.create_confirmation( user_id=user_id, action=ConfirmationAction.INVITATION.name, - data=data_model.json(), + data=data_model.model_dump_json(), ) @@ -233,7 +231,7 @@ async def extract_email_from_invitation( """Returns associated email""" with _invitations_request_context(invitation_code=invitation_code) as url: content = await extract_invitation(app, invitation_url=f"{url}") - return parse_obj_as(LowerCaseEmailStr, content.guest) + return TypeAdapter(LowerCaseEmailStr).validate_python(content.guest) async def check_and_consume_invitation( diff --git a/services/web/server/src/simcore_service_webserver/login/handlers_registration.py b/services/web/server/src/simcore_service_webserver/login/handlers_registration.py index d3f553db71c..c9a373bbb63 100644 --- a/services/web/server/src/simcore_service_webserver/login/handlers_registration.py +++ b/services/web/server/src/simcore_service_webserver/login/handlers_registration.py @@ -1,11 +1,11 @@ import logging -from datetime import datetime, timedelta -from typing import Any, ClassVar, Literal +from datetime import datetime, timedelta, timezone +from typing import Literal from aiohttp import web from aiohttp.web import RouteTableDef from models_library.emails import LowerCaseEmailStr -from pydantic import BaseModel, Field, PositiveInt, SecretStr, validator +from pydantic import ConfigDict, BaseModel, Field, PositiveInt, SecretStr, field_validator from servicelib.aiohttp import status from servicelib.aiohttp.requests_validation import parse_request_body_as from servicelib.error_codes import create_error_code @@ -114,12 +114,11 @@ class RegisterBody(InputSchema): confirm: SecretStr | None = Field(None, description="Password confirmation") invitation: str | None = Field(None, description="Invitation code") - _password_confirm_match = validator("confirm", allow_reuse=True)( + _password_confirm_match = field_validator("confirm")( check_confirm_password_match ) - - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ { "email": "foo@mymail.com", @@ -129,6 +128,7 @@ class Config: } ] } + ) @routes.post(f"/{API_VTAG}/auth/register", name="auth_register") @@ -203,7 +203,7 @@ async def register(request: web.Request): app=request.app, ) if invitation.trial_account_days: - expires_at = datetime.utcnow() + timedelta(invitation.trial_account_days) + expires_at = datetime.now(timezone.utc) + timedelta(invitation.trial_account_days) # get 
authorized user or create new user = await _auth_api.get_user_by_email(request.app, email=registration.email) diff --git a/services/web/server/src/simcore_service_webserver/login/settings.py b/services/web/server/src/simcore_service_webserver/login/settings.py index c32ce319c7f..307e5424cff 100644 --- a/services/web/server/src/simcore_service_webserver/login/settings.py +++ b/services/web/server/src/simcore_service_webserver/login/settings.py @@ -2,7 +2,7 @@ from typing import Final, Literal from aiohttp import web -from pydantic import BaseModel, validator +from pydantic import BaseModel, field_validator from pydantic.fields import Field from pydantic.types import PositiveFloat, PositiveInt, SecretStr from settings_library.base import BaseCustomSettings @@ -54,7 +54,7 @@ class LoginSettings(BaseCustomSettings): description="Minimum length of password", ) - @validator("LOGIN_2FA_REQUIRED") + @field_validator("LOGIN_2FA_REQUIRED") @classmethod def login_2fa_needs_email_registration(cls, v, values): # NOTE: this constraint ensures that a phone is registered in current workflow @@ -63,7 +63,7 @@ def login_2fa_needs_email_registration(cls, v, values): raise ValueError(msg) return v - @validator("LOGIN_2FA_REQUIRED") + @field_validator("LOGIN_2FA_REQUIRED") @classmethod def login_2fa_needs_sms_service(cls, v, values): if v and values.get("LOGIN_TWILIO") is None: diff --git a/services/web/server/src/simcore_service_webserver/meta_modeling/_handlers.py b/services/web/server/src/simcore_service_webserver/meta_modeling/_handlers.py index 91ca21e076c..13d5a29884a 100644 --- a/services/web/server/src/simcore_service_webserver/meta_modeling/_handlers.py +++ b/services/web/server/src/simcore_service_webserver/meta_modeling/_handlers.py @@ -9,7 +9,7 @@ from models_library.projects import ProjectID from models_library.rest_pagination import Page, PageQueryParameters from models_library.rest_pagination_utils import paginate_data -from pydantic import BaseModel, ValidationError, validator +from pydantic import field_validator, BaseModel, ValidationError from pydantic.fields import Field from pydantic.networks import HttpUrl from servicelib.rest_constants import RESPONSE_MODEL_POLICY @@ -33,7 +33,7 @@ class ParametersModel(PageQueryParameters): project_uuid: ProjectID ref_id: CommitID - @validator("ref_id", pre=True) + @field_validator("ref_id", mode="before") @classmethod def tags_as_refid_not_implemented(cls, v): try: diff --git a/services/web/server/src/simcore_service_webserver/meta_modeling/_results.py b/services/web/server/src/simcore_service_webserver/meta_modeling/_results.py index 68829e3489a..f5b65473cef 100644 --- a/services/web/server/src/simcore_service_webserver/meta_modeling/_results.py +++ b/services/web/server/src/simcore_service_webserver/meta_modeling/_results.py @@ -7,18 +7,16 @@ import logging -from typing import Any +from typing import Annotated, Any, TypeAlias from models_library.projects_nodes import OutputsDict from models_library.projects_nodes_io import NodeIDStr -from pydantic import BaseModel, ConstrainedInt, Field +from pydantic import ConfigDict, BaseModel, Field _logger = logging.getLogger(__name__) -class ProgressInt(ConstrainedInt): - ge = 0 - le = 100 +ProgressInt: TypeAlias = Annotated[int, Field(ge = 0, le = 100)] class ExtractedResults(BaseModel): @@ -31,32 +29,31 @@ class ExtractedResults(BaseModel): values: dict[NodeIDStr, OutputsDict] = Field( ..., description="Captured outputs per node" ) - - class Config: - schema_extra = { - "example": { - # sample with 2 
computational services, 2 data sources (iterator+parameter) and 2 observers (probes) - "progress": { - "4c08265a-427b-4ac3-9eab-1d11c822ada4": 0, - "e33c6880-1b1d-4419-82d7-270197738aa9": 100, - }, - "labels": { - "0f1e38c9-dcb7-443c-a745-91b97ac28ccc": "Integer iterator", - "2d0ce8b9-c9c3-43ce-ad2f-ad493898de37": "Probe Sensor - Integer", - "445b44d1-59b3-425c-ac48-7c13e0f2ea5b": "Probe Sensor - Integer_2", - "d76fca06-f050-4790-88a8-0aac10c87b39": "Boolean Parameter", - }, - "values": { - "0f1e38c9-dcb7-443c-a745-91b97ac28ccc": { - "out_1": 1, - "out_2": [3, 4], - }, - "2d0ce8b9-c9c3-43ce-ad2f-ad493898de37": {"in_1": 7}, - "445b44d1-59b3-425c-ac48-7c13e0f2ea5b": {"in_1": 1}, - "d76fca06-f050-4790-88a8-0aac10c87b39": {"out_1": True}, + model_config = ConfigDict( + json_schema_extra={ + "example": { + # sample with 2 computational services, 2 data sources (iterator+parameter) and 2 observers (probes) + "progress": { + "4c08265a-427b-4ac3-9eab-1d11c822ada4": 0, + "e33c6880-1b1d-4419-82d7-270197738aa9": 100, + }, + "labels": { + "0f1e38c9-dcb7-443c-a745-91b97ac28ccc": "Integer iterator", + "2d0ce8b9-c9c3-43ce-ad2f-ad493898de37": "Probe Sensor - Integer", + "445b44d1-59b3-425c-ac48-7c13e0f2ea5b": "Probe Sensor - Integer_2", + "d76fca06-f050-4790-88a8-0aac10c87b39": "Boolean Parameter", + }, + "values": { + "0f1e38c9-dcb7-443c-a745-91b97ac28ccc": { + "out_1": 1, + "out_2": [3, 4], }, - } + "2d0ce8b9-c9c3-43ce-ad2f-ad493898de37": {"in_1": 7}, + "445b44d1-59b3-425c-ac48-7c13e0f2ea5b": {"in_1": 1}, + "d76fca06-f050-4790-88a8-0aac10c87b39": {"out_1": True}, + }, } + }) def extract_project_results(workbench: dict[str, Any]) -> ExtractedResults: diff --git a/services/web/server/src/simcore_service_webserver/payments/_autorecharge_db.py b/services/web/server/src/simcore_service_webserver/payments/_autorecharge_db.py index 8aec3e45359..5b5df725429 100644 --- a/services/web/server/src/simcore_service_webserver/payments/_autorecharge_db.py +++ b/services/web/server/src/simcore_service_webserver/payments/_autorecharge_db.py @@ -6,7 +6,7 @@ from models_library.basic_types import NonNegativeDecimal from models_library.users import UserID from models_library.wallets import WalletID -from pydantic import BaseModel, PositiveInt +from pydantic import ConfigDict, BaseModel, PositiveInt from simcore_postgres_database.utils_payments_autorecharge import AutoRechargeStmts from ..db.plugin import get_database_engine @@ -23,10 +23,8 @@ class PaymentsAutorechargeDB(BaseModel): enabled: bool primary_payment_method_id: PaymentMethodID top_up_amount_in_usd: NonNegativeDecimal - monthly_limit_in_usd: NonNegativeDecimal | None - - class Config: - orm_mode = True + monthly_limit_in_usd: NonNegativeDecimal | None = None + model_config = ConfigDict(from_attributes=True) async def get_wallet_autorecharge( diff --git a/services/web/server/src/simcore_service_webserver/payments/_methods_db.py b/services/web/server/src/simcore_service_webserver/payments/_methods_db.py index b5838eb171c..f05b3204414 100644 --- a/services/web/server/src/simcore_service_webserver/payments/_methods_db.py +++ b/services/web/server/src/simcore_service_webserver/payments/_methods_db.py @@ -8,7 +8,7 @@ from models_library.api_schemas_webserver.wallets import PaymentMethodID from models_library.users import UserID from models_library.wallets import WalletID -from pydantic import BaseModel, parse_obj_as +from pydantic import ConfigDict, BaseModel, TypeAdapter from simcore_postgres_database.models.payments_methods import ( InitPromptAckFlowState, 
payments_methods, @@ -36,8 +36,9 @@ class PaymentsMethodsDB(BaseModel): state: InitPromptAckFlowState state_message: str | None - class Config: - orm_mode = True + model_config = ConfigDict( + from_attributes=True, + ) async def insert_init_payment_method( @@ -81,7 +82,7 @@ async def list_successful_payment_methods( .order_by(payments_methods.c.created.desc()) ) # newest first rows = await result.fetchall() or [] - return parse_obj_as(list[PaymentsMethodsDB], rows) + return TypeAdapter(list[PaymentsMethodsDB]).validate_python(rows) async def get_successful_payment_method( @@ -104,7 +105,7 @@ async def get_successful_payment_method( if row is None: raise PaymentMethodNotFoundError(payment_method_id=payment_method_id) - return PaymentsMethodsDB.from_orm(row) + return PaymentsMethodsDB.model_validate(row) async def get_pending_payment_methods_ids( @@ -117,7 +118,7 @@ async def get_pending_payment_methods_ids( .order_by(payments_methods.c.initiated_at.asc()) # oldest first ) rows = await result.fetchall() or [] - return [parse_obj_as(PaymentMethodID, row.payment_method_id) for row in rows] + return [TypeAdapter(PaymentMethodID).validate_python(row.payment_method_id) for row in rows] async def udpate_payment_method( @@ -168,7 +169,7 @@ async def udpate_payment_method( row = await result.first() assert row, "execute above should have caught this" # nosec - return PaymentsMethodsDB.from_orm(row) + return PaymentsMethodsDB.model_validate(row) async def delete_payment_method( diff --git a/services/web/server/src/simcore_service_webserver/payments/_onetime_db.py b/services/web/server/src/simcore_service_webserver/payments/_onetime_db.py index 9f94d46b707..abcf2b464d4 100644 --- a/services/web/server/src/simcore_service_webserver/payments/_onetime_db.py +++ b/services/web/server/src/simcore_service_webserver/payments/_onetime_db.py @@ -9,7 +9,7 @@ from models_library.products import ProductName from models_library.users import UserID from models_library.wallets import WalletID -from pydantic import BaseModel, HttpUrl, PositiveInt, parse_obj_as +from pydantic import ConfigDict, BaseModel, HttpUrl, PositiveInt, parse_obj_as from simcore_postgres_database.models.payments_transactions import ( PaymentTransactionState, payments_transactions, @@ -44,9 +44,7 @@ class PaymentsTransactionsDB(BaseModel): completed_at: datetime.datetime | None state: PaymentTransactionState state_message: str | None - - class Config: - orm_mode = True + model_config = ConfigDict(from_attributes=True) async def list_user_payment_transactions( diff --git a/services/web/server/src/simcore_service_webserver/payments/settings.py b/services/web/server/src/simcore_service_webserver/payments/settings.py index 846e2b1e9f9..4f069cc921f 100644 --- a/services/web/server/src/simcore_service_webserver/payments/settings.py +++ b/services/web/server/src/simcore_service_webserver/payments/settings.py @@ -3,7 +3,7 @@ from aiohttp import web from models_library.basic_types import NonNegativeDecimal -from pydantic import Field, HttpUrl, PositiveInt, SecretStr, parse_obj_as, validator +from pydantic import TypeAdapter, field_validator, Field, HttpUrl, PositiveInt, SecretStr from settings_library.base import BaseCustomSettings from settings_library.basic_types import PortInt, VersionTag from settings_library.utils_service import ( @@ -18,7 +18,7 @@ class PaymentsSettings(BaseCustomSettings, MixinServiceSettings): PAYMENTS_HOST: str = "payments" PAYMENTS_PORT: PortInt = DEFAULT_FASTAPI_PORT - PAYMENTS_VTAG: VersionTag = parse_obj_as(VersionTag, "v1") 
+    PAYMENTS_VTAG: VersionTag = TypeAdapter(VersionTag).validate_python("v1")

     PAYMENTS_USERNAME: str = Field(
         ...,
@@ -42,7 +42,7 @@ class PaymentsSettings(BaseCustomSettings, MixinServiceSettings):
     )

     PAYMENTS_FAKE_GATEWAY_URL: HttpUrl = Field(
-        default=parse_obj_as(HttpUrl, "https://fake-payment-gateway.com"),
+        default=TypeAdapter(HttpUrl).validate_python("https://fake-payment-gateway.com"),
         description="FAKE Base url to the payment gateway",
     )

@@ -82,7 +82,7 @@ def base_url(self) -> str:
         )
         return base_url_without_vtag

-    @validator("PAYMENTS_FAKE_COMPLETION")
+    @field_validator("PAYMENTS_FAKE_COMPLETION")
     @classmethod
     def _payments_cannot_be_faken_in_production(cls, v):
         if v is True and "production" in os.environ.get("SWARM_STACK_NAME", ""):
@@ -90,7 +90,7 @@ def _payments_cannot_be_faken_in_production(cls, v):
             raise ValueError(msg)
         return v

-    @validator("PAYMENTS_AUTORECHARGE_DEFAULT_MONTHLY_LIMIT")
+    @field_validator("PAYMENTS_AUTORECHARGE_DEFAULT_MONTHLY_LIMIT")
     @classmethod
     def _monthly_limit_greater_than_top_up(cls, v, values):
         top_up = values["PAYMENTS_AUTORECHARGE_DEFAULT_TOP_UP_AMOUNT"]
diff --git a/services/web/server/src/simcore_service_webserver/products/_model.py b/services/web/server/src/simcore_service_webserver/products/_model.py
index de3955652a3..ec566b06884 100644
--- a/services/web/server/src/simcore_service_webserver/products/_model.py
+++ b/services/web/server/src/simcore_service_webserver/products/_model.py
@@ -14,7 +14,7 @@
 from models_library.emails import LowerCaseEmailStr
 from models_library.products import ProductName
 from models_library.utils.change_case import snake_to_camel
-from pydantic import BaseModel, Extra, Field, PositiveInt, validator
+from pydantic import field_validator, ConfigDict, BaseModel, Field, PositiveInt
 from simcore_postgres_database.models.products import (
     EmailFeedback,
     Forum,
@@ -40,12 +40,12 @@ class Product(BaseModel):
     SEE descriptions in packages/postgres-database/src/simcore_postgres_database/models/products.py
     """

-    name: ProductName = Field(regex=PUBLIC_VARIABLE_NAME_RE)
+    name: ProductName = Field(pattern=PUBLIC_VARIABLE_NAME_RE, validate_default=True)

     display_name: str = Field(..., description="Long display name")
     short_name: str | None = Field(
         None,
-        regex=TWILIO_ALPHANUMERIC_SENDER_ID_RE,
+        pattern=TWILIO_ALPHANUMERIC_SENDER_ID_RE,
         min_length=2,
         max_length=11,
         description="Short display name for SMS",
@@ -109,7 +109,7 @@ class Product(BaseModel):
         description="Price of the credits in this product given in credit/USD. 
None for free product.", ) - @validator("*", pre=True) + @field_validator("*", mode="before") @classmethod def parse_empty_string_as_null(cls, v): """Safe measure: database entries are sometimes left blank instead of null""" @@ -117,7 +117,7 @@ def parse_empty_string_as_null(cls, v): return None return v - @validator("name", pre=True, always=True) + @field_validator("name", mode="before") @classmethod def validate_name(cls, v): if v not in FRONTEND_APPS_AVAILABLE: @@ -128,14 +128,13 @@ def validate_name(cls, v): @property def twilio_alpha_numeric_sender_id(self) -> str: return self.short_name or self.display_name.replace(string.punctuation, "")[:11] - - class Config: - alias_generator = snake_to_camel # to export - allow_population_by_field_name = True - frozen = True # read-only - orm_mode = True - extra = Extra.ignore - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + alias_generator=snake_to_camel, + populate_by_name=True, + frozen=True, + from_attributes=True, + extra="ignore", + json_schema_extra={ "examples": [ { # fake mandatory @@ -225,7 +224,8 @@ class Config: "is_payment_enabled": False, }, ] - } + }, + ) # helpers ---- diff --git a/services/web/server/src/simcore_service_webserver/projects/_comments_handlers.py b/services/web/server/src/simcore_service_webserver/projects/_comments_handlers.py index 4ee774a22a2..f4eff519a02 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_comments_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/_comments_handlers.py @@ -15,7 +15,7 @@ Page, ) from models_library.rest_pagination_utils import paginate_data -from pydantic import BaseModel, Extra, Field, NonNegativeInt +from pydantic import ConfigDict, BaseModel, Field, NonNegativeInt from servicelib.aiohttp.requests_validation import ( parse_request_body_as, parse_request_path_parameters_as, @@ -59,24 +59,18 @@ async def wrapper(request: web.Request) -> web.StreamResponse: class _ProjectCommentsPathParams(BaseModel): project_uuid: ProjectID - - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") class _ProjectCommentsWithCommentPathParams(BaseModel): project_uuid: ProjectID comment_id: CommentID - - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") class _ProjectCommentsBodyParams(BaseModel): contents: str - - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") @routes.post( @@ -118,9 +112,7 @@ class _ListProjectCommentsQueryParams(BaseModel): offset: NonNegativeInt = Field( default=0, description="index to the first item to return (pagination)" ) - - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") @routes.get(f"/{VTAG}/projects/{{project_uuid}}/comments", name="list_project_comments") diff --git a/services/web/server/src/simcore_service_webserver/projects/_common_models.py b/services/web/server/src/simcore_service_webserver/projects/_common_models.py index d25a0f6c24b..57c02680f72 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_common_models.py +++ b/services/web/server/src/simcore_service_webserver/projects/_common_models.py @@ -6,7 +6,7 @@ from models_library.projects import ProjectID from models_library.users import UserID -from pydantic import BaseModel, Extra, Field +from pydantic import ConfigDict, BaseModel, Field from servicelib.request_keys import RQT_USERID_KEY from .._constants import RQ_PRODUCT_KEY @@ -19,7 +19,4 @@ class RequestContext(BaseModel): class 
ProjectPathParams(BaseModel): project_id: ProjectID - - class Config: - allow_population_by_field_name = True - extra = Extra.forbid + model_config = ConfigDict(populate_by_name=True, extra="forbid") diff --git a/services/web/server/src/simcore_service_webserver/projects/_crud_handlers_models.py b/services/web/server/src/simcore_service_webserver/projects/_crud_handlers_models.py index 6e89c6eb7c8..28571e8d14d 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_crud_handlers_models.py +++ b/services/web/server/src/simcore_service_webserver/projects/_crud_handlers_models.py @@ -14,7 +14,7 @@ from models_library.rest_pagination import PageQueryParameters from models_library.utils.common_validators import null_or_none_str_to_none_validator from models_library.workspaces import WorkspaceID -from pydantic import BaseModel, Extra, Field, Json, root_validator, validator +from pydantic import field_validator, ConfigDict, BaseModel, Field, Json, model_validator from servicelib.common_headers import ( UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, X_SIMCORE_PARENT_NODE_ID, @@ -44,7 +44,7 @@ class ProjectCreateHeaders(BaseModel): alias=X_SIMCORE_PARENT_NODE_ID, ) - @root_validator + @model_validator(mode="before") @classmethod def check_parent_valid(cls, values: dict[str, Any]) -> dict[str, Any]: if ( @@ -57,9 +57,7 @@ def check_parent_valid(cls, values: dict[str, Any]) -> dict[str, Any]: msg = "Both parent_project_uuid and parent_node_id must be set or both null or both unset" raise ValueError(msg) return values - - class Config: - allow_population_by_field_name = False + model_config = ConfigDict(populate_by_name=False) class ProjectCreateParams(BaseModel): @@ -79,9 +77,7 @@ class ProjectCreateParams(BaseModel): default=False, description="Enables/disables hidden flag. Hidden projects are by default unlisted", ) - - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") class ProjectListParams(PageQueryParameters): @@ -93,30 +89,32 @@ class ProjectListParams(PageQueryParameters): default=None, description="Multi column full text search", max_length=100, - example="My Project", + examples=["My Project"], ) folder_id: FolderID | None = Field( default=None, description="Filter projects in specific folder. Default filtering is a root directory.", + validate_default=True, ) workspace_id: WorkspaceID | None = Field( default=None, description="Filter projects in specific workspace. Default filtering is a private workspace.", + validate_default=True, ) - @validator("search", pre=True) + @field_validator("search", mode="before") @classmethod def search_check_empty_string(cls, v): if not v: return None return v - _null_or_none_str_to_none_validator = validator( - "folder_id", allow_reuse=True, pre=True + _null_or_none_str_to_none_validator = field_validator( + "folder_id", mode="before" )(null_or_none_str_to_none_validator) - _null_or_none_str_to_none_validator2 = validator( - "workspace_id", allow_reuse=True, pre=True + _null_or_none_str_to_none_validator2 = field_validator( + "workspace_id", mode="before" )(null_or_none_str_to_none_validator) @@ -124,11 +122,12 @@ class ProjectListWithJsonStrParams(ProjectListParams): order_by: Json[OrderBy] = Field( # pylint: disable=unsubscriptable-object default=OrderBy(field=IDStr("last_change_date"), direction=OrderDirection.DESC), description="Order by field (type|uuid|name|description|prj_owner|creation_date|last_change_date) and direction (asc|desc). 
The default sorting order is ascending.",
-        example='{"field": "prj_owner", "direction": "desc"}',
+        examples=['{"field": "prj_owner", "direction": "desc"}'],
         alias="order_by",
     )

-    @validator("order_by", check_fields=False)
+    @field_validator("order_by", check_fields=False)
     @classmethod
     def validate_order_by_field(cls, v):
         if v.field not in {
@@ -143,9 +142,7 @@ def validate_order_by_field(cls, v):
             msg = f"We do not support ordering by provided field {v.field}"
             raise ValueError(msg)
         return v
-
-    class Config:
-        extra = Extra.forbid
+    model_config = ConfigDict(extra="forbid")


 class ProjectActiveParams(BaseModel):
diff --git a/services/web/server/src/simcore_service_webserver/projects/_folders_handlers.py b/services/web/server/src/simcore_service_webserver/projects/_folders_handlers.py
index 0c5d7960a5b..8a0e4caad0f 100644
--- a/services/web/server/src/simcore_service_webserver/projects/_folders_handlers.py
+++ b/services/web/server/src/simcore_service_webserver/projects/_folders_handlers.py
@@ -5,7 +5,7 @@
 from models_library.folders import FolderID
 from models_library.projects import ProjectID
 from models_library.utils.common_validators import null_or_none_str_to_none_validator
-from pydantic import BaseModel, Extra, validator
+from pydantic import ConfigDict, BaseModel, field_validator
 from servicelib.aiohttp.requests_validation import parse_request_path_parameters_as
 from servicelib.aiohttp.typing_extension import Handler
 from servicelib.mimetype_constants import MIMETYPE_APPLICATION_JSON
@@ -41,13 +41,11 @@ async def wrapper(request: web.Request) -> web.StreamResponse:
 class _ProjectsFoldersPathParams(BaseModel):
     project_id: ProjectID
     folder_id: FolderID | None
-
-    class Config:
-        extra = Extra.forbid
+    model_config = ConfigDict(extra="forbid")

     # validators
-    _null_or_none_str_to_none_validator = validator(
-        "folder_id", allow_reuse=True, pre=True
+    _null_or_none_str_to_none_validator = field_validator(
+        "folder_id", mode="before"
     )(null_or_none_str_to_none_validator)


diff --git a/services/web/server/src/simcore_service_webserver/projects/_groups_handlers.py b/services/web/server/src/simcore_service_webserver/projects/_groups_handlers.py
index 97a1120e623..5845e8fb896 100644
--- a/services/web/server/src/simcore_service_webserver/projects/_groups_handlers.py
+++ b/services/web/server/src/simcore_service_webserver/projects/_groups_handlers.py
@@ -8,7 +8,7 @@
 from aiohttp import web
 from models_library.projects import ProjectID
 from models_library.users import GroupID
-from pydantic import BaseModel, Extra
+from pydantic import ConfigDict, BaseModel
 from servicelib.aiohttp.requests_validation import (
     parse_request_body_as,
     parse_request_path_parameters_as,
@@ -53,18 +53,14 @@ async def wrapper(request: web.Request) -> web.StreamResponse:
 class _ProjectsGroupsPathParams(BaseModel):
     project_id: ProjectID
     group_id: GroupID
-
-    class Config:
-        extra = Extra.forbid
+    model_config = ConfigDict(extra="forbid")


 class _ProjectsGroupsBodyParams(BaseModel):
     read: bool
     write: bool
     delete: bool
-
-    class Config:
-        extra = Extra.forbid
+    model_config = ConfigDict(extra="forbid")


 @routes.post(
diff --git a/services/web/server/src/simcore_service_webserver/projects/_nodes_api.py b/services/web/server/src/simcore_service_webserver/projects/_nodes_api.py
index ab6ba4b7d93..7161727722f 100644
--- a/services/web/server/src/simcore_service_webserver/projects/_nodes_api.py
+++ b/services/web/server/src/simcore_service_webserver/projects/_nodes_api.py
@@ -14,14 +14,12 @@
 from 
models_library.projects_nodes_io import NodeID, SimCoreFileLink from models_library.users import UserID from pydantic import ( - BaseModel, + TypeAdapter, model_validator, BaseModel, Field, HttpUrl, NonNegativeFloat, NonNegativeInt, ValidationError, - parse_obj_as, - root_validator, ) from servicelib.utils import logged_gather @@ -96,10 +94,10 @@ class NodeScreenshot(BaseModel): mimetype: str | None = Field( default=None, description="File's media type or None if unknown. SEE https://www.iana.org/assignments/media-types/media-types.xhtml", - example="image/jpeg", + examples=["image/jpeg"], ) - @root_validator(pre=True) + @model_validator(mode="before") @classmethod def guess_mimetype_if_undefined(cls, values): mimetype = values.get("mimetype") @@ -173,7 +171,7 @@ async def __get_link( return __get_search_key(file_meta_data), await get_download_link( app, user_id, - parse_obj_as(SimCoreFileLink, {"store": "0", "path": file_meta_data.file_id}), + TypeAdapter(SimCoreFileLink).validate_python({"store": "0", "path": file_meta_data.file_id}), ) @@ -228,7 +226,7 @@ async def get_node_screenshots( assert node.outputs is not None # nosec - filelink = parse_obj_as(SimCoreFileLink, node.outputs[KeyIDStr("outFile")]) + filelink = TypeAdapter(SimCoreFileLink).validate_python(node.outputs[TypeAdapter(KeyIDStr).validate_python("outFile")]) file_url = await get_download_link(app, user_id, filelink) screenshots.append( diff --git a/services/web/server/src/simcore_service_webserver/projects/_ports_api.py b/services/web/server/src/simcore_service_webserver/projects/_ports_api.py index c31de86d4fa..6a0c816065f 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_ports_api.py +++ b/services/web/server/src/simcore_service_webserver/projects/_ports_api.py @@ -24,7 +24,7 @@ jsonschema_validate_data, ) from models_library.utils.services_io import JsonSchemaDict, get_service_io_json_schema -from pydantic import ValidationError +from pydantic import ConfigDict, ValidationError from ..director_v2.api import get_batch_tasks_outputs from .exceptions import InvalidInputValue @@ -163,8 +163,9 @@ def set_inputs_in_project( class _NonStrictPortLink(PortLink): - class Config(PortLink.Config): - allow_population_by_field_name = True + model_config = ConfigDict( + populate_by_name = True, + ) class _OutputPortInfo(NamedTuple): diff --git a/services/web/server/src/simcore_service_webserver/projects/_projects_nodes_pricing_unit_handlers.py b/services/web/server/src/simcore_service_webserver/projects/_projects_nodes_pricing_unit_handlers.py index 25e9ef46876..f5e7ef86639 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_projects_nodes_pricing_unit_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/_projects_nodes_pricing_unit_handlers.py @@ -10,7 +10,7 @@ from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID, NodeIDStr from models_library.resource_tracker import PricingPlanId, PricingUnitId -from pydantic import BaseModel, Extra +from pydantic import ConfigDict, BaseModel from pydantic.errors import PydanticErrorMixin from servicelib.aiohttp.requests_validation import parse_request_path_parameters_as from servicelib.aiohttp.typing_extension import Handler @@ -99,9 +99,7 @@ class _ProjectNodePricingUnitPathParams(BaseModel): node_id: NodeID pricing_plan_id: PricingPlanId pricing_unit_id: PricingUnitId - - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") @routes.put( diff --git 
a/services/web/server/src/simcore_service_webserver/projects/_wallets_handlers.py b/services/web/server/src/simcore_service_webserver/projects/_wallets_handlers.py index dfa85fdb8e1..3963be837ee 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_wallets_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/_wallets_handlers.py @@ -9,7 +9,7 @@ from models_library.api_schemas_webserver.wallets import WalletGet from models_library.projects import ProjectID from models_library.wallets import WalletID -from pydantic import BaseModel, Extra +from pydantic import ConfigDict, BaseModel from servicelib.aiohttp.requests_validation import parse_request_path_parameters_as from servicelib.aiohttp.typing_extension import Handler from simcore_service_webserver.utils_aiohttp import envelope_json_response @@ -69,9 +69,7 @@ async def get_project_wallet(request: web.Request): class _ProjectWalletPathParams(BaseModel): project_id: ProjectID wallet_id: WalletID - - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") @routes.put( diff --git a/services/web/server/src/simcore_service_webserver/projects/_workspaces_handlers.py b/services/web/server/src/simcore_service_webserver/projects/_workspaces_handlers.py index 667c5159689..0d36d74717a 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_workspaces_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/_workspaces_handlers.py @@ -5,7 +5,7 @@ from models_library.projects import ProjectID from models_library.utils.common_validators import null_or_none_str_to_none_validator from models_library.workspaces import WorkspaceID -from pydantic import BaseModel, Extra, validator +from pydantic import ConfigDict, BaseModel, field_validator from servicelib.aiohttp.requests_validation import parse_request_path_parameters_as from servicelib.aiohttp.typing_extension import Handler from servicelib.mimetype_constants import MIMETYPE_APPLICATION_JSON @@ -51,13 +51,11 @@ async def wrapper(request: web.Request) -> web.StreamResponse: class _ProjectWorkspacesPathParams(BaseModel): project_id: ProjectID workspace_id: WorkspaceID | None - - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") # validators - _null_or_none_str_to_none_validator = validator( - "workspace_id", allow_reuse=True, pre=True + _null_or_none_str_to_none_validator = field_validator( + "workspace_id", mode="before" )(null_or_none_str_to_none_validator) diff --git a/services/web/server/src/simcore_service_webserver/projects/models.py b/services/web/server/src/simcore_service_webserver/projects/models.py index c124c9547fc..9e3446cb445 100644 --- a/services/web/server/src/simcore_service_webserver/projects/models.py +++ b/services/web/server/src/simcore_service_webserver/projects/models.py @@ -12,7 +12,7 @@ none_to_empty_str_pre_validator, ) from models_library.workspaces import WorkspaceID -from pydantic import BaseModel, validator +from pydantic import ConfigDict, BaseModel, field_validator from simcore_postgres_database.models.projects import ProjectType, projects ProjectDict: TypeAlias = dict[str, Any] @@ -50,20 +50,20 @@ class ProjectDB(BaseModel): published: bool hidden: bool workspace_id: WorkspaceID | None - - class Config: - orm_mode = True + model_config = ConfigDict( + from_attributes=True, + ) # validators - _empty_thumbnail_is_none = validator("thumbnail", allow_reuse=True, pre=True)( + _empty_thumbnail_is_none = field_validator("thumbnail", mode="before")( 
empty_str_to_none_pre_validator ) - _none_description_is_empty = validator("description", allow_reuse=True, pre=True)( + _none_description_is_empty = field_validator("description", mode="before")( none_to_empty_str_pre_validator ) -assert set(ProjectDB.__fields__.keys()).issubset( # nosec +assert set(ProjectDB.model_fields.keys()).issubset( # nosec {c.name for c in projects.columns if c.name not in ["access_rights"]} ) @@ -73,9 +73,9 @@ class UserProjectAccessRights(BaseModel): read: bool write: bool delete: bool - - class Config: - orm_mode = True + model_config = ConfigDict( + from_attributes=True, + ) __all__: tuple[str, ...] = ( diff --git a/services/web/server/src/simcore_service_webserver/resource_usage/_pricing_plans_admin_handlers.py b/services/web/server/src/simcore_service_webserver/resource_usage/_pricing_plans_admin_handlers.py index edfcea623d0..09be493447f 100644 --- a/services/web/server/src/simcore_service_webserver/resource_usage/_pricing_plans_admin_handlers.py +++ b/services/web/server/src/simcore_service_webserver/resource_usage/_pricing_plans_admin_handlers.py @@ -20,7 +20,7 @@ PricingUnitWithCostUpdate, ) from models_library.users import UserID -from pydantic import BaseModel, Extra, Field +from pydantic import ConfigDict, BaseModel, Field from servicelib.aiohttp.requests_validation import ( parse_request_body_as, parse_request_path_parameters_as, @@ -72,9 +72,9 @@ class _RequestContext(BaseModel): class _GetPricingPlanPathParams(BaseModel): pricing_plan_id: PricingPlanId - - class Config: - extra = Extra.forbid + model_config = ConfigDict( + extra="forbid", + ) @routes.get( @@ -256,9 +256,9 @@ async def update_pricing_plan(request: web.Request): class _GetPricingUnitPathParams(BaseModel): pricing_plan_id: PricingPlanId pricing_unit_id: PricingUnitId - - class Config: - extra = Extra.forbid + model_config = ConfigDict( + extra="forbid", + ) @routes.get( diff --git a/services/web/server/src/simcore_service_webserver/resource_usage/_pricing_plans_handlers.py b/services/web/server/src/simcore_service_webserver/resource_usage/_pricing_plans_handlers.py index 76d3466f56d..00a0e03f8c4 100644 --- a/services/web/server/src/simcore_service_webserver/resource_usage/_pricing_plans_handlers.py +++ b/services/web/server/src/simcore_service_webserver/resource_usage/_pricing_plans_handlers.py @@ -4,7 +4,7 @@ from models_library.api_schemas_webserver.resource_usage import PricingUnitGet from models_library.resource_tracker import PricingPlanId, PricingUnitId from models_library.users import UserID -from pydantic import BaseModel, Extra, Field +from pydantic import ConfigDict, BaseModel, Field from servicelib.aiohttp.requests_validation import parse_request_path_parameters_as from servicelib.aiohttp.typing_extension import Handler from servicelib.request_keys import RQT_USERID_KEY @@ -49,9 +49,9 @@ class _RequestContext(BaseModel): class _GetPricingPlanUnitPathParams(BaseModel): pricing_plan_id: PricingPlanId pricing_unit_id: PricingUnitId - - class Config: - extra = Extra.forbid + model_config = ConfigDict( + extra="forbid", + ) @routes.get( diff --git a/services/web/server/src/simcore_service_webserver/resource_usage/_service_runs_handlers.py b/services/web/server/src/simcore_service_webserver/resource_usage/_service_runs_handlers.py index 227c941bd38..cb867a01247 100644 --- a/services/web/server/src/simcore_service_webserver/resource_usage/_service_runs_handlers.py +++ b/services/web/server/src/simcore_service_webserver/resource_usage/_service_runs_handlers.py @@ -23,14 +23,12 @@ 
from models_library.users import UserID from models_library.wallets import WalletID from pydantic import ( - BaseModel, - Extra, + field_validator, ConfigDict, BaseModel, Field, Json, NonNegativeInt, - parse_obj_as, - validator, ) + from servicelib.aiohttp.requests_validation import parse_request_query_parameters_as from servicelib.aiohttp.typing_extension import Handler from servicelib.mimetype_constants import MIMETYPE_APPLICATION_JSON @@ -74,7 +72,7 @@ class _ListServicesResourceUsagesQueryParams(BaseModel): order_by: Json[OrderBy] = Field( # pylint: disable=unsubscriptable-object default=OrderBy(field=IDStr("started_at"), direction=OrderDirection.DESC), description=ORDER_BY_DESCRIPTION, - example='{"field": "started_at", "direction": "desc"}', + examples=['{"field": "started_at", "direction": "desc"}'], ) filters: ( Json[ServiceResourceUsagesFilters] # pylint: disable=unsubscriptable-object @@ -82,10 +80,10 @@ class _ListServicesResourceUsagesQueryParams(BaseModel): ) = Field( default=None, description="Filters to process on the resource usages list, encoded as JSON. Currently supports the filtering of 'started_at' field with 'from' and 'until' parameters in ISO 8601 format. The date range specified is inclusive.", - example='{"started_at": {"from": "yyyy-mm-dd", "until": "yyyy-mm-dd"}}', + examples=['{"started_at": {"from": "yyyy-mm-dd", "until": "yyyy-mm-dd"}}'], ) - @validator("order_by", allow_reuse=True) + @field_validator("order_by") @classmethod def validate_order_by_field(cls, v): if v.field not in { @@ -112,9 +110,7 @@ def validate_order_by_field(cls, v): if v.field == "credit_cost": v.field = "osparc_credits" return v - - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") class _ListServicesResourceUsagesQueryParamsWithPagination( @@ -129,18 +125,14 @@ class _ListServicesResourceUsagesQueryParamsWithPagination( offset: NonNegativeInt = Field( default=0, description="index to the first item to return (pagination)" ) - - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") class _ListServicesAggregatedUsagesQueryParams(PageQueryParameters): aggregated_by: ServicesAggregatedUsagesType time_period: ServicesAggregatedUsagesTimePeriod wallet_id: WalletID - - class Config: - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") # diff --git a/services/web/server/src/simcore_service_webserver/scicrunch/models.py b/services/web/server/src/simcore_service_webserver/scicrunch/models.py index 743f4bd8211..88f15a6381e 100644 --- a/services/web/server/src/simcore_service_webserver/scicrunch/models.py +++ b/services/web/server/src/simcore_service_webserver/scicrunch/models.py @@ -6,7 +6,7 @@ import re from datetime import datetime -from pydantic import BaseModel, Field, validator +from pydantic import field_validator, ConfigDict, BaseModel, Field logger = logging.getLogger(__name__) @@ -58,19 +58,19 @@ class ResearchResource(BaseModel): rrid: str = Field( ..., description="Unique identifier used as classifier, i.e. 
to tag studies and services", - regex=STRICT_RRID_PATTERN, + pattern=STRICT_RRID_PATTERN, ) name: str description: str - @validator("rrid", pre=True) + @field_validator("rrid", mode="before") @classmethod def format_rrid(cls, v): return normalize_rrid_tags(v, with_prefix=True) - - class Config: - orm_mode = True - anystr_strip_whitespace = True + model_config = ConfigDict( + from_attributes=True, + str_strip_whitespace=True, + ) # postgres_database.scicrunch_resources ORM -------------------- diff --git a/services/web/server/src/simcore_service_webserver/session/settings.py b/services/web/server/src/simcore_service_webserver/session/settings.py index b5f3c333fa8..bd6cf4ee921 100644 --- a/services/web/server/src/simcore_service_webserver/session/settings.py +++ b/services/web/server/src/simcore_service_webserver/session/settings.py @@ -1,8 +1,7 @@ from typing import Final from aiohttp import web -from pydantic import PositiveInt -from pydantic.class_validators import validator +from pydantic import AliasChoices, field_validator, PositiveInt from pydantic.fields import Field from pydantic.types import SecretStr from settings_library.base import BaseCustomSettings @@ -22,7 +21,7 @@ class SessionSettings(BaseCustomSettings, MixinSessionSettings): description="Secret key to encrypt cookies. " 'TIP: python3 -c "from cryptography.fernet import *; print(Fernet.generate_key())"', min_length=44, - env=["SESSION_SECRET_KEY", "WEBSERVER_SESSION_SECRET_KEY"], + validation_alias=AliasChoices("SESSION_SECRET_KEY", "WEBSERVER_SESSION_SECRET_KEY"), ) SESSION_ACCESS_TOKENS_EXPIRATION_INTERVAL_SECS: int = Field( @@ -53,12 +52,12 @@ class SessionSettings(BaseCustomSettings, MixinSessionSettings): description="This prevents JavaScript from accessing the session cookie", ) - @validator("SESSION_SECRET_KEY") + @field_validator("SESSION_SECRET_KEY") @classmethod def check_valid_fernet_key(cls, v): return cls.do_check_valid_fernet_key(v) - @validator("SESSION_COOKIE_SAMESITE") + @field_validator("SESSION_COOKIE_SAMESITE") @classmethod def check_valid_samesite_attribute(cls, v): # NOTE: Replacement to `Literal["Strict", "Lax"] | None` due to bug in settings_library/base.py:93: in prepare_field diff --git a/services/web/server/src/simcore_service_webserver/socketio/models.py b/services/web/server/src/simcore_service_webserver/socketio/models.py index 06e5b9014cb..5b82c0cfd88 100644 --- a/services/web/server/src/simcore_service_webserver/socketio/models.py +++ b/services/web/server/src/simcore_service_webserver/socketio/models.py @@ -12,11 +12,11 @@ from models_library.socketio import SocketMessageDict from models_library.users import UserID from models_library.utils.fastapi_encoders import jsonable_encoder -from pydantic import BaseModel, Field +from pydantic import ConfigDict, BaseModel class WebSocketMessageBase(BaseModel): - event_type: str = Field(..., const=True) + event_type: Literal[...] = ... @classmethod def get_event_type(cls) -> str: @@ -26,9 +26,9 @@ def get_event_type(cls) -> str: @abstractmethod def to_socket_dict(self) -> SocketMessageDict: ... 
- - class Config: - frozen = True + model_config = ConfigDict( + frozen=True, + ) class _WebSocketProjectMixin(BaseModel): diff --git a/services/web/server/src/simcore_service_webserver/statics/settings.py b/services/web/server/src/simcore_service_webserver/statics/settings.py index 275def8154b..46471b2f235 100644 --- a/services/web/server/src/simcore_service_webserver/statics/settings.py +++ b/services/web/server/src/simcore_service_webserver/statics/settings.py @@ -7,7 +7,7 @@ import pycountry from aiohttp import web from models_library.utils.change_case import snake_to_camel -from pydantic import AnyHttpUrl, Field, parse_obj_as +from pydantic import AliasChoices, AnyHttpUrl, Field, TypeAdapter from settings_library.base import BaseCustomSettings from .._constants import APP_SETTINGS_KEY @@ -121,12 +121,12 @@ def to_statics(self) -> dict[str, Any]: class StaticWebserverModuleSettings(BaseCustomSettings): STATIC_WEBSERVER_URL: AnyHttpUrl = Field( - default=parse_obj_as(AnyHttpUrl, "http://static-webserver:8000"), + default=TypeAdapter(AnyHttpUrl).validate_python("http://static-webserver:8000"), description="url fort static content", - env=[ + validation_alias=AliasChoices( "STATIC_WEBSERVER_URL", "WEBSERVER_STATIC_MODULE_STATIC_WEB_SERVER_URL", # legacy - ], + ), ) diff --git a/services/web/server/src/simcore_service_webserver/storage/schemas.py b/services/web/server/src/simcore_service_webserver/storage/schemas.py index 4c47c99a8ff..c81e5d04b91 100644 --- a/services/web/server/src/simcore_service_webserver/storage/schemas.py +++ b/services/web/server/src/simcore_service_webserver/storage/schemas.py @@ -1,8 +1,7 @@ -from enum import Enum -from typing import Any, ClassVar, TypeAlias +from typing import Any, TypeAlias from models_library.api_schemas_storage import TableSynchronisation -from pydantic import BaseModel, Field +from pydantic import ConfigDict, BaseModel, RootModel # NOTE: storage generates URLs that contain double encoded # slashes, and when applying validation via `StorageFileID` @@ -14,14 +13,14 @@ class FileLocation(BaseModel): name: str | None = None id: float | None = None - - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "example": { "name": "simcore.s3", - "id": 0, + "id": 0, }, } + ) class FileLocationArray(BaseModel): @@ -60,18 +59,18 @@ class FileUploadCompleteFuture(BaseModel): class DatasetMetaData(BaseModel): dataset_id: str | None = None display_name: str | None = None - - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "example": { "dataset_id": "N:id-aaaa", "display_name": "simcore-testing", }, } + ) -class DatasetMetaDataArray(BaseModel): - __root__: list[DatasetMetaData] +class DatasetMetaDataArray(RootModel[list[DatasetMetaData]]): + ... 
class FileLocationEnveloped(BaseModel): @@ -121,9 +120,8 @@ class FileMetaData(BaseModel): file_size: int | None = None entity_tag: str | None = None is_directory: bool | None = None - - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "example": { "file_uuid": "simcore-testing/105/1000/3", "location_id": "0", @@ -138,6 +136,7 @@ class Config: "is_directory": False, } } + ) class FileMetaDataArray(BaseModel): @@ -151,9 +150,10 @@ class FileMetaEnvelope(BaseModel): class PresignedLink(BaseModel): link: str | None = None - - class Config: - schema_extra: ClassVar[dict[str, Any]] = {"example": {"link": "example_link"}} + model_config = ConfigDict( + json_schema_extra={ + "example": {"link": "example_link"}}, + ) class PresignedLinkEnveloped(BaseModel): diff --git a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_redirects_handlers.py b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_redirects_handlers.py index 1b60fd5f7e0..83c13f95ea3 100644 --- a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_redirects_handlers.py +++ b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_redirects_handlers.py @@ -11,7 +11,7 @@ from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID from models_library.services import ServiceKey, ServiceVersion -from pydantic import BaseModel, Extra, ValidationError, validator +from pydantic import field_validator, ConfigDict, BaseModel, ValidationError from servicelib.aiohttp import status from servicelib.aiohttp.requests_validation import parse_request_query_parameters_as from servicelib.aiohttp.typing_extension import Handler @@ -146,15 +146,17 @@ async def wrapper(request: web.Request) -> web.StreamResponse: class ServiceQueryParams(ServiceParams): - class Config: - extra = Extra.forbid + model_config = ConfigDict( + extra="forbid", + ) class FileQueryParams(FileParams): - class Config: - extra = Extra.forbid + model_config = ConfigDict( + extra="forbid", + ) - @validator("file_type") + @field_validator("file_type") @classmethod def ensure_extension_upper_and_dotless(cls, v): # NOTE: see filetype constraint-check @@ -165,14 +167,15 @@ def ensure_extension_upper_and_dotless(cls, v): class ServiceAndFileParams(FileQueryParams, ServiceParams): - class Config: + model_config = ConfigDict( # Optional configuration to exclude duplicates from schema - schema_extra = { + json_schema_extra={ "allOf": [ {"$ref": "#/definitions/FileParams"}, {"$ref": "#/definitions/ServiceParams"}, ] } + ) class ViewerQueryParams(BaseModel): @@ -189,7 +192,7 @@ def from_viewer(viewer: ViewerInfo) -> "ViewerQueryParams": viewer_version=viewer.version, ) - @validator("file_type") + @field_validator("file_type") @classmethod def ensure_extension_upper_and_dotless(cls, v): # NOTE: see filetype constraint-check diff --git a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_rest_handlers.py b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_rest_handlers.py index 9f66cd460b0..70a0da3bef5 100644 --- a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_rest_handlers.py +++ b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_rest_handlers.py @@ -7,9 +7,10 @@ from aiohttp import web from aiohttp.web import Request +from common_library.pydantic_networks_extension import HttpUrlLegacy from models_library.services import ServiceKey from 
models_library.services_types import ServiceVersion -from pydantic import BaseModel, Field, ValidationError, parse_obj_as, validator +from pydantic import TypeAdapter, field_validator, ConfigDict, BaseModel, Field, ValidationError, parse_obj_as from pydantic.networks import HttpUrl from .._meta import API_VTAG @@ -32,11 +33,11 @@ def _compose_file_and_service_dispatcher_prefix_url( request: web.Request, viewer: ViewerInfo ) -> HttpUrl: """This is denoted PREFIX URL because it needs to append extra query parameters""" - params = ViewerQueryParams.from_viewer(viewer).dict() + params = ViewerQueryParams.from_viewer(viewer).model_dump() absolute_url = request.url.join( request.app.router["get_redirection_to_viewer"].url_for().with_query(**params) ) - absolute_url_: HttpUrl = parse_obj_as(HttpUrl, f"{absolute_url}") + absolute_url_: HttpUrl = TypeAdapter(HttpUrlLegacy).validate_python(f"{absolute_url}") return absolute_url_ @@ -44,13 +45,13 @@ def _compose_service_only_dispatcher_prefix_url( request: web.Request, service_key: str, service_version: str ) -> HttpUrl: params = ViewerQueryParams( - viewer_key=ServiceKey(service_key), - viewer_version=ServiceVersion(service_version), - ).dict(exclude_none=True, exclude_unset=True) + viewer_key=TypeAdapter(ServiceKey).validate_python(service_key), + viewer_version=TypeAdapter(ServiceVersion).validate_python(service_version), + ).model_dump(exclude_none=True, exclude_unset=True) absolute_url = request.url.join( request.app.router["get_redirection_to_viewer"].url_for().with_query(**params) ) - absolute_url_: HttpUrl = parse_obj_as(HttpUrl, f"{absolute_url}") + absolute_url_: HttpUrl = TypeAdapter(HttpUrlLegacy).validate_python(f"{absolute_url}") return absolute_url_ @@ -125,15 +126,14 @@ def create(cls, meta: ServiceMetaData, request: web.Request): **asdict(meta), ) - @validator("file_extensions") + @field_validator("file_extensions") @classmethod def remove_dot_prefix_from_extension(cls, v): if v: return [ext.removeprefix(".") for ext in v] return v - - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "example": { "key": "simcore/services/dynamic/sim4life", "title": "Sim4Life Mattermost", @@ -143,6 +143,7 @@ class Config: "view_url": "https://host.com/view?viewer_key=simcore/services/dynamic/raw-graphs&viewer_version=1.2.3", } } + ) # @@ -177,7 +178,7 @@ async def list_viewers(request: Request): file_type: str | None = request.query.get("file_type", None) viewers = [ - Viewer.create(request, viewer).dict() + Viewer.create(request, viewer).model_dump() for viewer in await list_viewers_info(request.app, file_type=file_type) ] return envelope_json_response(viewers) @@ -189,7 +190,7 @@ async def list_default_viewers(request: Request): file_type: str | None = request.query.get("file_type", None) viewers = [ - Viewer.create(request, viewer).dict() + Viewer.create(request, viewer).model_dump() for viewer in await list_viewers_info( request.app, file_type=file_type, only_default=True ) diff --git a/services/web/server/src/simcore_service_webserver/studies_dispatcher/settings.py b/services/web/server/src/simcore_service_webserver/studies_dispatcher/settings.py index 3ef317631ed..d5326c3ccbd 100644 --- a/services/web/server/src/simcore_service_webserver/studies_dispatcher/settings.py +++ b/services/web/server/src/simcore_service_webserver/studies_dispatcher/settings.py @@ -1,12 +1,14 @@ from datetime import timedelta -from typing import Any, ClassVar from aiohttp import web -from pydantic import 
ByteSize, HttpUrl, parse_obj_as, validator +from pydantic import TypeAdapter, field_validator, ByteSize from pydantic.fields import Field +from pydantic_settings import SettingsConfigDict from servicelib.aiohttp.application_keys import APP_SETTINGS_KEY from settings_library.base import BaseCustomSettings +from simcore_service_webserver.studies_dispatcher._rest_handlers import HttpUrlLegacy + class StudiesDispatcherSettings(BaseCustomSettings): STUDIES_ACCESS_ANONYMOUS_ALLOWED: bool = Field( @@ -20,23 +22,23 @@ class StudiesDispatcherSettings(BaseCustomSettings): " and removed by the GC", ) - STUDIES_DEFAULT_SERVICE_THUMBNAIL: HttpUrl = Field( - default=parse_obj_as(HttpUrl, "https://via.placeholder.com/170x120.png"), + STUDIES_DEFAULT_SERVICE_THUMBNAIL: HttpUrlLegacy = Field( + default=TypeAdapter(HttpUrlLegacy).validate_python("https://via.placeholder.com/170x120.png"), description="Default thumbnail for services or dispatch project with a service", ) - STUDIES_DEFAULT_FILE_THUMBNAIL: HttpUrl = Field( - default=parse_obj_as(HttpUrl, "https://via.placeholder.com/170x120.png"), + STUDIES_DEFAULT_FILE_THUMBNAIL: HttpUrlLegacy = Field( + default=TypeAdapter(HttpUrlLegacy).validate_python("https://via.placeholder.com/170x120.png"), description="Default thumbnail for dispatch projects with only data (i.e. file-picker)", ) STUDIES_MAX_FILE_SIZE_ALLOWED: ByteSize = Field( - default=parse_obj_as(ByteSize, "50Mib"), + default=TypeAdapter(ByteSize).validate_python("50Mib"), description="Limits the size of the files that can be dispatched" "Note that the accuracy of the file size is not guaranteed and this limit might be surpassed", ) - @validator("STUDIES_GUEST_ACCOUNT_LIFETIME") + @field_validator("STUDIES_GUEST_ACCOUNT_LIFETIME") @classmethod def _is_positive_lifetime(cls, v): if v and isinstance(v, timedelta) and v.total_seconds() <= 0: @@ -49,14 +51,14 @@ def is_login_required(self): Normally dispatcher entrypoints are openened """ return not self.STUDIES_ACCESS_ANONYMOUS_ALLOWED - - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = SettingsConfigDict( + json_schema_extra={ "example": { "STUDIES_GUEST_ACCOUNT_LIFETIME": "2 1:10:00", # 2 days 1h and 10 mins "STUDIES_ACCESS_ANONYMOUS_ALLOWED": "1", }, } + ) def get_plugin_settings(app: web.Application) -> StudiesDispatcherSettings: diff --git a/services/web/server/src/simcore_service_webserver/users/_notifications.py b/services/web/server/src/simcore_service_webserver/users/_notifications.py index 256e521f89c..39e6fda9208 100644 --- a/services/web/server/src/simcore_service_webserver/users/_notifications.py +++ b/services/web/server/src/simcore_service_webserver/users/_notifications.py @@ -6,7 +6,7 @@ from models_library.products import ProductName from models_library.users import UserID from models_library.utils.enums import StrAutoEnum -from pydantic import BaseModel, NonNegativeInt, validator +from pydantic import field_validator, ConfigDict, BaseModel, NonNegativeInt MAX_NOTIFICATIONS_FOR_USER_TO_SHOW: Final[NonNegativeInt] = 10 MAX_NOTIFICATIONS_FOR_USER_TO_KEEP: Final[NonNegativeInt] = 100 @@ -33,7 +33,7 @@ class BaseUserNotification(BaseModel): date: datetime product: Literal["UNDEFINED"] | ProductName = "UNDEFINED" - @validator("category", pre=True) + @field_validator("category", mode="before") @classmethod def category_to_upper(cls, value: str) -> str: return value.upper() @@ -58,10 +58,9 @@ class UserNotification(BaseUserNotification): def create_from_request_data( cls, request_data: UserNotificationCreate ) -> 
"UserNotification": - return cls.construct(id=f"{uuid4()}", read=False, **request_data.dict()) - - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + return cls.model_construct(id=f"{uuid4()}", read=False, **request_data.model_dump()) + model_config = ConfigDict( + json_schema_extra={ "examples": [ { "id": "3fb96d89-ff5d-4d27-b5aa-d20d46e20eb8", @@ -120,3 +119,4 @@ class Config: }, ] } + ) diff --git a/services/web/server/src/simcore_service_webserver/users/_schemas.py b/services/web/server/src/simcore_service_webserver/users/_schemas.py index 1dd4f59992f..3c563561f98 100644 --- a/services/web/server/src/simcore_service_webserver/users/_schemas.py +++ b/services/web/server/src/simcore_service_webserver/users/_schemas.py @@ -12,7 +12,7 @@ from models_library.api_schemas_webserver._base import InputSchema, OutputSchema from models_library.emails import LowerCaseEmailStr from models_library.products import ProductName -from pydantic import Field, root_validator, validator +from pydantic import ConfigDict, field_validator, model_validator, Field from simcore_postgres_database.models.users import UserStatus @@ -43,7 +43,7 @@ class UserProfile(OutputSchema): description="List of products this users is included or None if fields is unset", ) - @validator("status") + @field_validator("status") @classmethod def _consistency_check(cls, v, values): registered = values["registered"] @@ -74,11 +74,13 @@ class PreUserProfile(InputSchema): description="Keeps extra information provided in the request form. At most MAX_NUM_EXTRAS fields", ) - class Config(InputSchema.Config): - anystr_strip_whitespace = True - max_anystr_length = 200 + model_config = ConfigDict( + str_strip_whitespace = True, + str_max_length = 200 + ) - @root_validator(pre=True) + @model_validator(mode="before") + @classmethod @classmethod def _preprocess_aliases_and_extras(cls, values): # multiple aliases for "institution" @@ -92,8 +94,8 @@ def _preprocess_aliases_and_extras(cls, values): # collect extras extra_fields = {} field_names_and_aliases = ( - set(cls.__fields__.keys()) - | {f.alias for f in cls.__fields__.values() if f.alias} + set(cls.model_fields.keys()) + | {f.alias for f in cls.model_fields.values() if f.alias} | set(alias_by_priority) ) for key, value in values.items(): @@ -111,7 +113,7 @@ def _preprocess_aliases_and_extras(cls, values): return values - @validator("first_name", "last_name", "institution", pre=True) + @field_validator("first_name", "last_name", "institution", mode="before") @classmethod def _pre_normalize_given_names(cls, v): if v: @@ -120,7 +122,7 @@ def _pre_normalize_given_names(cls, v): return re.sub(r"\b\w+\b", lambda m: m.group(0).capitalize(), name) return v - @validator("country", pre=True) + @field_validator("country", mode="before") @classmethod def _pre_check_and_normalize_country(cls, v): if v: @@ -131,4 +133,4 @@ def _pre_check_and_normalize_country(cls, v): return v -assert set(PreUserProfile.__fields__).issubset(UserProfile.__fields__) # nosec +assert set(PreUserProfile.model_fields).issubset(UserProfile.model_fields) # nosec diff --git a/services/web/server/src/simcore_service_webserver/users/schemas.py b/services/web/server/src/simcore_service_webserver/users/schemas.py index 53c9ee9b756..32c9e57a33f 100644 --- a/services/web/server/src/simcore_service_webserver/users/schemas.py +++ b/services/web/server/src/simcore_service_webserver/users/schemas.py @@ -8,7 +8,7 @@ from models_library.emails import LowerCaseEmailStr from models_library.users import FirstNameStr, LastNameStr, 
UserID from models_library.utils.json_serialization import json_dumps -from pydantic import BaseModel, Field, root_validator, validator +from pydantic import field_validator, model_validator, ConfigDict, BaseModel, Field from simcore_postgres_database.models.users import UserRole from ..utils import gravatar_hash @@ -27,14 +27,14 @@ class ThirdPartyToken(BaseModel): ) token_key: UUID = Field(..., description="basic token key") token_secret: UUID | None = None - - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "example": { "service": "github-api-v1", "token_key": "5f21abf5-c596-47b7-bfd1-c0e436ef1107", } } + ) class TokenCreate(ThirdPartyToken): @@ -49,14 +49,14 @@ class TokenCreate(ThirdPartyToken): class ProfileUpdate(BaseModel): first_name: FirstNameStr | None = None last_name: LastNameStr | None = None - - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "example": { "first_name": "Pedro", "last_name": "Crespo", } } + ) class ProfileGet(BaseModel): @@ -74,13 +74,9 @@ class ProfileGet(BaseModel): ) preferences: AggregatedPreferences - class Config: - # NOTE: old models have an hybrid between snake and camel cases! - # Should be unified at some point - allow_population_by_field_name = True - json_dumps = json_dumps - - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + populate_by_name=True, + json_schema_extra={ "examples": [ { "id": 1, @@ -98,8 +94,9 @@ class Config: }, ] } + ) - @root_validator(pre=True) + @model_validator(mode="before") @classmethod def _auto_generate_gravatar(cls, values): gravatar_id = values.get("gravatar_id") @@ -108,7 +105,7 @@ def _auto_generate_gravatar(cls, values): values["gravatar_id"] = gravatar_hash(email) return values - @validator("role", pre=True) + @field_validator("role", mode="before") @classmethod def _to_upper_string(cls, v): if isinstance(v, str): diff --git a/services/web/server/src/simcore_service_webserver/utils_aiohttp.py b/services/web/server/src/simcore_service_webserver/utils_aiohttp.py index ae35a58ee6f..816122926f1 100644 --- a/services/web/server/src/simcore_service_webserver/utils_aiohttp.py +++ b/services/web/server/src/simcore_service_webserver/utils_aiohttp.py @@ -9,7 +9,6 @@ from models_library.generics import Envelope from models_library.utils.json_serialization import json_dumps from pydantic import BaseModel, Field -from pydantic.generics import GenericModel from servicelib.common_headers import X_FORWARDED_PROTO from servicelib.mimetype_constants import MIMETYPE_APPLICATION_JSON from servicelib.rest_constants import RESPONSE_MODEL_POLICY @@ -116,7 +115,7 @@ def create_redirect_to_page_response( PageParameters = TypeVar("PageParameters", bound=BaseModel) -class NextPage(GenericModel, Generic[PageParameters]): +class NextPage(BaseModel, Generic[PageParameters]): """ This is the body of a 2XX response to pass the front-end what kind of page shall be display next and some information about it diff --git a/services/web/server/src/simcore_service_webserver/version_control/_handlers.py b/services/web/server/src/simcore_service_webserver/version_control/_handlers.py index 08791750ec0..bbf38b5f062 100644 --- a/services/web/server/src/simcore_service_webserver/version_control/_handlers.py +++ b/services/web/server/src/simcore_service_webserver/version_control/_handlers.py @@ -4,7 +4,7 @@ from models_library.projects import ProjectID from models_library.rest_pagination import Page, 
PageQueryParameters from models_library.rest_pagination_utils import paginate_data -from pydantic import BaseModel, validator +from pydantic import field_validator, BaseModel from servicelib.aiohttp.requests_validation import ( parse_request_body_as, parse_request_path_parameters_as, @@ -46,7 +46,7 @@ class _CheckpointsPathParam(BaseModel): project_uuid: ProjectID ref_id: RefID - @validator("ref_id", pre=True) + @field_validator("ref_id", mode="before") @classmethod def _normalize_refid(cls, v): if v and v == "HEAD": diff --git a/services/web/server/src/simcore_service_webserver/version_control/models.py b/services/web/server/src/simcore_service_webserver/version_control/models.py index a562459547e..bd9beda9a9c 100644 --- a/services/web/server/src/simcore_service_webserver/version_control/models.py +++ b/services/web/server/src/simcore_service_webserver/version_control/models.py @@ -5,7 +5,7 @@ from models_library.basic_types import SHA1Str from models_library.projects import ProjectID from models_library.projects_nodes import Node -from pydantic import BaseModel, Field, PositiveInt, StrictBool, StrictFloat, StrictInt +from pydantic import ConfigDict, BaseModel, Field, PositiveInt, StrictBool, StrictFloat, StrictInt from pydantic.networks import HttpUrl BuiltinTypes: TypeAlias = Union[StrictBool, StrictInt, StrictFloat, str] @@ -51,9 +51,9 @@ def from_commit_log(cls, commit: RowProxy, tags: list[RowProxy]) -> "Checkpoint" class WorkbenchView(BaseModel): """A view (i.e. read-only and visual) of the project's workbench""" - - class Config: - orm_mode = True + model_config = ConfigDict( + from_attributes=True, + ) # NOTE: Tmp replacing UUIDS by str due to a problem serializing to json UUID keys # in the response https://github.com/samuelcolvin/pydantic/issues/2096#issuecomment-814860206 diff --git a/services/web/server/src/simcore_service_webserver/wallets/_groups_handlers.py b/services/web/server/src/simcore_service_webserver/wallets/_groups_handlers.py index 87109220837..20219328cee 100644 --- a/services/web/server/src/simcore_service_webserver/wallets/_groups_handlers.py +++ b/services/web/server/src/simcore_service_webserver/wallets/_groups_handlers.py @@ -8,7 +8,7 @@ from aiohttp import web from models_library.users import GroupID, UserID from models_library.wallets import WalletID -from pydantic import BaseModel, Extra, Field +from pydantic import ConfigDict, BaseModel, Field from servicelib.aiohttp.requests_validation import ( parse_request_body_as, parse_request_path_parameters_as, @@ -60,18 +60,18 @@ async def wrapper(request: web.Request) -> web.StreamResponse: class _WalletsGroupsPathParams(BaseModel): wallet_id: WalletID group_id: GroupID - - class Config: - extra = Extra.forbid + model_config = ConfigDict( + extra="forbid", + ) class _WalletsGroupsBodyParams(BaseModel): read: bool write: bool delete: bool - - class Config: - extra = Extra.forbid + model_config = ConfigDict( + extra="forbid", + ) @routes.post( diff --git a/services/web/server/src/simcore_service_webserver/workspaces/_groups_db.py b/services/web/server/src/simcore_service_webserver/workspaces/_groups_db.py index daeba51ae80..88b5330ed11 100644 --- a/services/web/server/src/simcore_service_webserver/workspaces/_groups_db.py +++ b/services/web/server/src/simcore_service_webserver/workspaces/_groups_db.py @@ -9,7 +9,7 @@ from aiohttp import web from models_library.users import GroupID from models_library.workspaces import WorkspaceID -from pydantic import BaseModel +from pydantic import ConfigDict, BaseModel from 
simcore_postgres_database.models.workspaces_access_rights import ( workspaces_access_rights, ) @@ -31,9 +31,9 @@ class WorkspaceGroupGetDB(BaseModel): delete: bool created: datetime modified: datetime - - class Config: - orm_mode = True + model_config = ConfigDict( + from_attributes=True, + ) ## DB API diff --git a/services/web/server/src/simcore_service_webserver/workspaces/_groups_handlers.py b/services/web/server/src/simcore_service_webserver/workspaces/_groups_handlers.py index 1d926fdb14a..a5c614b67c1 100644 --- a/services/web/server/src/simcore_service_webserver/workspaces/_groups_handlers.py +++ b/services/web/server/src/simcore_service_webserver/workspaces/_groups_handlers.py @@ -8,7 +8,7 @@ from aiohttp import web from models_library.users import GroupID, UserID from models_library.workspaces import WorkspaceID -from pydantic import BaseModel, Extra, Field +from pydantic import ConfigDict, BaseModel, Field from servicelib.aiohttp.requests_validation import ( parse_request_body_as, parse_request_path_parameters_as, @@ -60,18 +60,18 @@ async def wrapper(request: web.Request) -> web.StreamResponse: class _WorkspacesGroupsPathParams(BaseModel): workspace_id: WorkspaceID group_id: GroupID - - class Config: - extra = Extra.forbid + model_config = ConfigDict( + extra="forbid", + ) class _WorkspacesGroupsBodyParams(BaseModel): read: bool write: bool delete: bool - - class Config: - extra = Extra.forbid + model_config = ConfigDict( + extra="forbid", + ) @routes.post( diff --git a/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_handlers.py b/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_handlers.py index a95a97aa4d0..f9eba7f5f5b 100644 --- a/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_handlers.py +++ b/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_handlers.py @@ -14,7 +14,7 @@ from models_library.rest_pagination_utils import paginate_data from models_library.users import UserID from models_library.workspaces import WorkspaceID -from pydantic import Extra, Field, Json, parse_obj_as, validator +from pydantic import TypeAdapter, field_validator, ConfigDict, Field, Json from servicelib.aiohttp.requests_validation import ( RequestParams, StrictRequestParams, @@ -74,11 +74,11 @@ class WorkspacesListWithJsonStrQueryParams(PageQueryParameters): order_by: Json[OrderBy] = Field( default=OrderBy(field=IDStr("modified"), direction=OrderDirection.DESC), description="Order by field (modified_at|name|description) and direction (asc|desc). 
The default sorting order is ascending.", - example='{"field": "name", "direction": "desc"}', + examples=['{"field": "name", "direction": "desc"}'], alias="order_by", ) - @validator("order_by", check_fields=False) + @field_validator("order_by", check_fields=False) @classmethod def validate_order_by_field(cls, v): if v.field not in { @@ -91,9 +91,9 @@ def validate_order_by_field(cls, v): if v.field == "modified_at": v.field = "modified" return v - - class Config: - extra = Extra.forbid + model_config = ConfigDict( + extra="forbid", + ) @routes.post(f"/{VTAG}/workspaces", name="create_workspace") @@ -132,7 +132,7 @@ async def list_workspaces(request: web.Request): product_name=req_ctx.product_name, offset=query_params.offset, limit=query_params.limit, - order_by=parse_obj_as(OrderBy, query_params.order_by), + order_by=TypeAdapter(OrderBy).validate_python(query_params.order_by), ) page = Page[WorkspaceGet].model_validate( @@ -145,7 +145,7 @@ async def list_workspaces(request: web.Request): ) ) return web.Response( - text=page.json(**RESPONSE_MODEL_POLICY), + text=page.model_dump_json(**RESPONSE_MODEL_POLICY), content_type=MIMETYPE_APPLICATION_JSON, ) diff --git a/services/web/server/tests/unit/isolated/test_studies_dispatcher_core.py b/services/web/server/tests/unit/isolated/test_studies_dispatcher_core.py index 72b754cf225..6ad69d6242e 100644 --- a/services/web/server/tests/unit/isolated/test_studies_dispatcher_core.py +++ b/services/web/server/tests/unit/isolated/test_studies_dispatcher_core.py @@ -12,7 +12,7 @@ import pytest from models_library.projects import Project, ProjectID from models_library.projects_nodes_io import NodeID -from pydantic import validator +from pydantic import field_validator from pydantic.main import BaseModel from pydantic.networks import HttpUrl from pytest_simcore.helpers.webserver_fake_services_data import list_fake_file_consumers @@ -63,7 +63,7 @@ def test_url_quoting_and_validation(): class M(BaseModel): url: HttpUrl - @validator("url", pre=True) + @field_validator("url", mode="before") @classmethod def unquote_url(cls, v): w = urllib.parse.unquote(v) diff --git a/services/web/server/tests/unit/isolated/test_utils_rate_limiting.py b/services/web/server/tests/unit/isolated/test_utils_rate_limiting.py index 5e2b5a891b0..6568b1b7db4 100644 --- a/services/web/server/tests/unit/isolated/test_utils_rate_limiting.py +++ b/services/web/server/tests/unit/isolated/test_utils_rate_limiting.py @@ -10,8 +10,9 @@ from aiohttp import web from aiohttp.test_utils import TestClient from aiohttp.web_exceptions import HTTPOk, HTTPTooManyRequests -from pydantic import ValidationError, conint, parse_obj_as +from pydantic import Field, TypeAdapter, ValidationError from simcore_service_webserver.utils_rate_limiting import global_rate_limit_route +from typing_extensions import Annotated TOTAL_TEST_TIME = 1 # secs MAX_NUM_REQUESTS = 3 @@ -110,7 +111,7 @@ async def test_global_rate_limit_route(requests_per_second: float, client: TestC for t in tasks: if retry_after := t.result().headers.get("Retry-After"): try: - parse_obj_as(conint(ge=1), retry_after) + TypeAdapter(Annotated[int, Field(ge=1)]).validate_python(retry_after) except ValidationError as err: failed.append((retry_after, f"{err}")) assert not failed diff --git a/services/web/server/tests/unit/with_dbs/03/version_control/test_version_control.py b/services/web/server/tests/unit/with_dbs/03/version_control/test_version_control.py index 026d0276641..8c8eb95f55b 100644 --- 
a/services/web/server/tests/unit/with_dbs/03/version_control/test_version_control.py +++ b/services/web/server/tests/unit/with_dbs/03/version_control/test_version_control.py @@ -4,16 +4,16 @@ from models_library.projects import NodesDict -from pydantic import BaseModel +from pydantic import ConfigDict, BaseModel from simcore_service_webserver.projects.models import ProjectDict from simcore_service_webserver.version_control.db import compute_workbench_checksum class WorkbenchModel(BaseModel): __root__: NodesDict - - class Config: - allow_population_by_field_name = True + model_config = ConfigDict( + populate_by_name=True, + ) def test_compute_workbench_checksum(fake_project: ProjectDict): From e64e49d6cbf1d337ca9c2d0c5540129b344afb02 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 9 Oct 2024 11:30:40 +0200 Subject: [PATCH 272/280] fix model_config --- .../src/simcore_service_webserver/products/_handlers.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/services/web/server/src/simcore_service_webserver/products/_handlers.py b/services/web/server/src/simcore_service_webserver/products/_handlers.py index a9d6b964961..ff3246f4558 100644 --- a/services/web/server/src/simcore_service_webserver/products/_handlers.py +++ b/services/web/server/src/simcore_service_webserver/products/_handlers.py @@ -5,7 +5,7 @@ from models_library.api_schemas_webserver.product import GetCreditPrice, GetProduct from models_library.basic_types import IDStr from models_library.users import UserID -from pydantic import Extra, Field +from pydantic import Field from servicelib.aiohttp.requests_validation import ( RequestParams, StrictRequestParams, @@ -70,8 +70,8 @@ async def _get_product(request: web.Request): except KeyError as err: raise web.HTTPNotFound(reason=f"{product_name=} not found") from err - assert GetProduct.Config.extra == Extra.ignore # nosec - data = GetProduct(**product.dict(), templates=[]) + assert GetProduct.model_config["extra"] == "ignore" # nosec + data = GetProduct(**product.model_dump(), templates=[]) return envelope_json_response(data) From ca1aad25d4cc95e8e026c1b102111c1491ae82ea Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 9 Oct 2024 12:27:48 +0200 Subject: [PATCH 273/280] fix import --- .../server/src/simcore_service_webserver/products/_model.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/services/web/server/src/simcore_service_webserver/products/_model.py b/services/web/server/src/simcore_service_webserver/products/_model.py index ec566b06884..1dba5568966 100644 --- a/services/web/server/src/simcore_service_webserver/products/_model.py +++ b/services/web/server/src/simcore_service_webserver/products/_model.py @@ -2,7 +2,6 @@ import string from typing import ( # noqa: UP035 # pydantic does not validate with re.Pattern Any, - ClassVar, Pattern, ) @@ -14,7 +13,7 @@ from models_library.emails import LowerCaseEmailStr from models_library.products import ProductName from models_library.utils.change_case import snake_to_camel -from pydantic import field_validator, ConfigDict, BaseModel, Field, PositiveInt, field_validator +from pydantic import ConfigDict, BaseModel, Field, PositiveInt, field_validator from simcore_postgres_database.models.products import ( EmailFeedback, Forum, From 580cc3919b5e229d340759dadbe02697b7dc08d3 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 9 Oct 2024 13:31:30 +0200 Subject: [PATCH 274/280] continue upgrading --- .../models/products.py | 8 +++- .../application_settings.py | 47 
++++++++++--------- .../login/settings.py | 4 +- .../products/_model.py | 15 +++--- .../studies_dispatcher/settings.py | 4 +- 5 files changed, 42 insertions(+), 36 deletions(-) diff --git a/packages/postgres-database/src/simcore_postgres_database/models/products.py b/packages/postgres-database/src/simcore_postgres_database/models/products.py index 913c929e04c..35936bbb98f 100644 --- a/packages/postgres-database/src/simcore_postgres_database/models/products.py +++ b/packages/postgres-database/src/simcore_postgres_database/models/products.py @@ -6,7 +6,13 @@ """ import json -from typing import Literal, TypedDict +import sys +from typing import Literal + +if sys.version_info >= (3, 12): + from typing import TypedDict +else: + from typing_extensions import TypedDict # See: https://docs.pydantic.dev/2.9/errors/usage_errors/#typed-dict-version import sqlalchemy as sa from sqlalchemy.dialects.postgresql import JSONB diff --git a/services/web/server/src/simcore_service_webserver/application_settings.py b/services/web/server/src/simcore_service_webserver/application_settings.py index 3ed7074d16e..fa134a196c4 100644 --- a/services/web/server/src/simcore_service_webserver/application_settings.py +++ b/services/web/server/src/simcore_service_webserver/application_settings.py @@ -3,6 +3,7 @@ from typing import Any, Final from aiohttp import web +from common_library.pydantic_fields_extension import is_nullable from models_library.basic_types import ( BootModeEnum, BuildTargetEnum, @@ -11,8 +12,8 @@ VersionTag, ) from models_library.utils.change_case import snake_to_camel -from pydantic import AliasChoices, TypeAdapter, field_validator, model_validator, AnyHttpUrl -from pydantic.fields import Field, ModelField +from pydantic import AliasChoices, TypeAdapter, ValidationInfo, field_validator, model_validator, AnyHttpUrl +from pydantic.fields import Field from pydantic.types import PositiveInt from settings_library.base import BaseCustomSettings from settings_library.email import SMTPSettings @@ -105,12 +106,12 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): ) WEBSERVER_LOGLEVEL: LogLevel = Field( default=LogLevel.WARNING.value, - env=["WEBSERVER_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"], + validation_alias=AliasChoices("WEBSERVER_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"), # NOTE: suffix '_LOGLEVEL' is used overall ) WEBSERVER_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field( default=False, - env=["WEBSERVER_LOG_FORMAT_LOCAL_DEV_ENABLED", "LOG_FORMAT_LOCAL_DEV_ENABLED"], + validation_alias=AliasChoices("WEBSERVER_LOG_FORMAT_LOCAL_DEV_ENABLED", "LOG_FORMAT_LOCAL_DEV_ENABLED"), description="Enables local development log format. WARNING: make sure it is disabled if you want to have structured logs!", ) # TODO: find a better name!? 
@@ -168,51 +169,50 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): ) WEBSERVER_DYNAMIC_SCHEDULER: DynamicSchedulerSettings | None = Field( - auto_default_from_env=True, description="dynamic-scheduler plugin settings" + description="dynamic-scheduler plugin settings", json_schema_extra={"auto_default_from_env": True} ) - WEBSERVER_REDIS: RedisSettings | None = Field(auto_default_from_env=True) + WEBSERVER_REDIS: RedisSettings | None = Field(json_schema_extra={"auto_default_from_env": True}) WEBSERVER_REST: RestSettings | None = Field( - auto_default_from_env=True, description="rest api plugin" + description="rest api plugin", json_schema_extra={"auto_default_from_env": True} ) WEBSERVER_RESOURCE_MANAGER: ResourceManagerSettings = Field( - auto_default_from_env=True, description="resource_manager plugin" + description="resource_manager plugin", json_schema_extra={"auto_default_from_env": True} ) WEBSERVER_RESOURCE_USAGE_TRACKER: ResourceUsageTrackerSettings | None = Field( - auto_default_from_env=True, - description="resource usage tracker service client's plugin", + description="resource usage tracker service client's plugin", json_schema_extra={"auto_default_from_env": True} ) WEBSERVER_SCICRUNCH: SciCrunchSettings | None = Field( - auto_default_from_env=True, description="scicrunch plugin" + description="scicrunch plugin", json_schema_extra={"auto_default_from_env": True} ) WEBSERVER_SESSION: SessionSettings = Field( - auto_default_from_env=True, description="session plugin" + description="session plugin", json_schema_extra={"auto_default_from_env": True} ) WEBSERVER_STATICWEB: StaticWebserverModuleSettings | None = Field( - auto_default_from_env=True, description="static-webserver service plugin" + description="static-webserver service plugin", json_schema_extra={"auto_default_from_env": True} ) WEBSERVER_STORAGE: StorageSettings | None = Field( - auto_default_from_env=True, description="storage service client's plugin" + description="storage service client's plugin", json_schema_extra={"auto_default_from_env": True} ) WEBSERVER_STUDIES_DISPATCHER: StudiesDispatcherSettings | None = Field( - auto_default_from_env=True, description="studies dispatcher plugin" + description="studies dispatcher plugin", json_schema_extra={"auto_default_from_env": True} ) WEBSERVER_TRACING: TracingSettings | None = Field( - auto_default_from_env=True, description="tracing plugin" + description="tracing plugin", json_schema_extra={"auto_default_from_env": True} ) WEBSERVER_PROJECTS: ProjectsSettings | None = Field( - auto_default_from_env=True, description="projects plugin" + description="projects plugin", json_schema_extra={"auto_default_from_env": True} ) WEBSERVER_RABBITMQ: RabbitSettings | None = Field( - auto_default_from_env=True, description="rabbitmq plugin" + description="rabbitmq plugin", json_schema_extra={"auto_default_from_env": True} ) WEBSERVER_USERS: UsersSettings | None = Field( - auto_default_from_env=True, description="users plugin" + description="users plugin", json_schema_extra={"auto_default_from_env": True} ) # These plugins only require (for the moment) an entry to toggle between enabled/disabled @@ -268,17 +268,18 @@ def build_vcs_release_url_if_unset(cls, values): mode="before" ) @classmethod - def enable_only_if_dev_features_allowed(cls, v, values, field: ModelField): + def enable_only_if_dev_features_allowed(cls, v, info: ValidationInfo): """Ensures that plugins 'under development' get programatically disabled if WEBSERVER_DEV_FEATURES_ENABLED=False """ - if 
values["WEBSERVER_DEV_FEATURES_ENABLED"]: + if info.data["WEBSERVER_DEV_FEATURES_ENABLED"]: return v if v: _logger.warning( - "%s still under development and will be disabled.", field.name + "%s still under development and will be disabled.", info.field_name ) - return None if field.allow_none else False + + return None if info.field_name and is_nullable(cls.model_fields[info.field_name]) else False @cached_property def log_level(self) -> int: diff --git a/services/web/server/src/simcore_service_webserver/login/settings.py b/services/web/server/src/simcore_service_webserver/login/settings.py index 307e5424cff..6288f267257 100644 --- a/services/web/server/src/simcore_service_webserver/login/settings.py +++ b/services/web/server/src/simcore_service_webserver/login/settings.py @@ -36,8 +36,8 @@ class LoginSettings(BaseCustomSettings): ) LOGIN_TWILIO: TwilioSettings | None = Field( - auto_default_from_env=True, description="Twilio service settings. Used to send SMS for 2FA", + json_schema_extra={"auto_default_from_env=True": True} ) LOGIN_2FA_CODE_EXPIRATION_SEC: PositiveInt = Field( @@ -94,7 +94,7 @@ def create_from_composition( """ For the LoginSettings, product-specific settings override app-specifics settings """ - composed_settings = {**app_login_settings.dict(), **product_login_settings} + composed_settings = {**app_login_settings.model_dump(), **product_login_settings} if "two_factor_enabled" in composed_settings: # legacy safe diff --git a/services/web/server/src/simcore_service_webserver/products/_model.py b/services/web/server/src/simcore_service_webserver/products/_model.py index 1dba5568966..1b72b0d9c21 100644 --- a/services/web/server/src/simcore_service_webserver/products/_model.py +++ b/services/web/server/src/simcore_service_webserver/products/_model.py @@ -1,9 +1,9 @@ import logging import string -from typing import ( # noqa: UP035 # pydantic does not validate with re.Pattern - Any, - Pattern, +from typing import ( + Any ) +import re from models_library.basic_regex import ( PUBLIC_VARIABLE_NAME_RE, @@ -44,14 +44,13 @@ class Product(BaseModel): display_name: str = Field(..., description="Long display name") short_name: str | None = Field( None, - pattern=TWILIO_ALPHANUMERIC_SENDER_ID_RE, + pattern=re.compile(TWILIO_ALPHANUMERIC_SENDER_ID_RE), min_length=2, max_length=11, description="Short display name for SMS", ) - host_regex: Pattern = Field(..., description="Host regex") - # NOTE: typing.Pattern is supported but not re.Pattern (SEE https://github.com/pydantic/pydantic/pull/4366) + host_regex: re.Pattern = Field(..., description="Host regex") support_email: LowerCaseEmailStr = Field( ..., @@ -110,7 +109,7 @@ class Product(BaseModel): @field_validator("*", mode="before") @classmethod - def parse_empty_string_as_null(cls, v): + def _parse_empty_string_as_null(cls, v): """Safe measure: database entries are sometimes left blank instead of null""" if isinstance(v, str) and len(v.strip()) == 0: return None @@ -118,7 +117,7 @@ def parse_empty_string_as_null(cls, v): @field_validator("name", mode="before") @classmethod - def validate_name(cls, v): + def _validate_name(cls, v): if v not in FRONTEND_APPS_AVAILABLE: msg = f"{v} is not in available front-end apps {FRONTEND_APPS_AVAILABLE}" raise ValueError(msg) diff --git a/services/web/server/src/simcore_service_webserver/studies_dispatcher/settings.py b/services/web/server/src/simcore_service_webserver/studies_dispatcher/settings.py index d5326c3ccbd..14318d10656 100644 --- 
a/services/web/server/src/simcore_service_webserver/studies_dispatcher/settings.py +++ b/services/web/server/src/simcore_service_webserver/studies_dispatcher/settings.py @@ -4,11 +4,11 @@ from pydantic import TypeAdapter, field_validator, ByteSize from pydantic.fields import Field from pydantic_settings import SettingsConfigDict + +from common_library.pydantic_networks_extension import HttpUrlLegacy from servicelib.aiohttp.application_keys import APP_SETTINGS_KEY from settings_library.base import BaseCustomSettings -from simcore_service_webserver.studies_dispatcher._rest_handlers import HttpUrlLegacy - class StudiesDispatcherSettings(BaseCustomSettings): STUDIES_ACCESS_ANONYMOUS_ALLOWED: bool = Field( From c0af73d8677145bb0d308f5000615f754359c3cc Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 9 Oct 2024 13:42:07 +0200 Subject: [PATCH 275/280] fix test --- .../projects/_crud_api_create.py | 15 +++++++-------- .../simcore_service_webserver/projects/models.py | 1 + 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/services/web/server/src/simcore_service_webserver/projects/_crud_api_create.py b/services/web/server/src/simcore_service_webserver/projects/_crud_api_create.py index 3be4c48051e..044d2ae6004 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_crud_api_create.py +++ b/services/web/server/src/simcore_service_webserver/projects/_crud_api_create.py @@ -2,7 +2,7 @@ import logging from collections.abc import Coroutine from contextlib import AsyncExitStack -from typing import Any, TypeAlias +from typing import Any, Type, TypeAlias from aiohttp import web from jsonschema import ValidationError as JsonSchemaValidationError @@ -15,7 +15,7 @@ from models_library.utils.fastapi_encoders import jsonable_encoder from models_library.utils.json_serialization import json_dumps from models_library.workspaces import UserWorkspaceAccessRightsDB -from pydantic import parse_obj_as +from pydantic import TypeAdapter, parse_obj_as from servicelib.aiohttp.long_running_tasks.server import TaskProgress from servicelib.mimetype_constants import MIMETYPE_APPLICATION_JSON from simcore_postgres_database.utils_projects_nodes import ( @@ -132,14 +132,14 @@ async def _copy_project_nodes_from_source_project( db: ProjectDBAPI = ProjectDBAPI.get_from_app_context(app) def _mapped_node_id(node: ProjectNode) -> NodeID: - return NodeID(nodes_map[NodeIDStr(f"{node.node_id}")]) + return NodeID(nodes_map[TypeAdapter(NodeIDStr).validate_python(f"{node.node_id}")]) return { _mapped_node_id(node): ProjectNodeCreate( node_id=_mapped_node_id(node), **{ k: v - for k, v in node.dict().items() + for k, v in node.model_dump().items() if k in ProjectNodeCreate.get_field_names(exclude={"node_id"}) }, ) @@ -157,7 +157,7 @@ async def _copy_files_from_source_project( ): db: ProjectDBAPI = ProjectDBAPI.get_from_app_context(app) needs_lock_source_project: bool = ( - await db.get_project_type(parse_obj_as(ProjectID, source_project["uuid"])) + await db.get_project_type(TypeAdapter(ProjectID).validate_python(source_project["uuid"])) != ProjectTypeDB.TEMPLATE ) @@ -178,8 +178,7 @@ async def _copy_files_from_source_project( ): task_progress.update( message=long_running_task.progress.message, - percent=parse_obj_as( - ProgressPercent, + percent=TypeAdapter(ProgressPercent).validate_python( ( starting_value + long_running_task.progress.percent * (1.0 - starting_value) @@ -403,7 +402,7 @@ async def create_project( # pylint: disable=too-many-arguments,too-many-branche ) ) new_project["accessRights"] = { - 
gid: access.dict() for gid, access in workspace_db.access_rights.items() + gid: access.model_dump() for gid, access in workspace_db.access_rights.items() } # Ensures is like ProjectGet diff --git a/services/web/server/src/simcore_service_webserver/projects/models.py b/services/web/server/src/simcore_service_webserver/projects/models.py index 9e3446cb445..051caa3cd05 100644 --- a/services/web/server/src/simcore_service_webserver/projects/models.py +++ b/services/web/server/src/simcore_service_webserver/projects/models.py @@ -52,6 +52,7 @@ class ProjectDB(BaseModel): workspace_id: WorkspaceID | None model_config = ConfigDict( from_attributes=True, + arbitrary_types_allowed=True, ) # validators From 25752370ffdf5876e1b2e097826fc94bcbe19f78 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 9 Oct 2024 13:48:48 +0200 Subject: [PATCH 276/280] fix field --- .../src/simcore_service_payments/models/payments_gateway.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/services/payments/src/simcore_service_payments/models/payments_gateway.py b/services/payments/src/simcore_service_payments/models/payments_gateway.py index ba3a9f6b087..a5d082ba353 100644 --- a/services/payments/src/simcore_service_payments/models/payments_gateway.py +++ b/services/payments/src/simcore_service_payments/models/payments_gateway.py @@ -30,7 +30,9 @@ class InitPayment(BaseModel): amount_dollars: AmountDecimal # metadata to store for billing or reference credits_: AmountDecimal = Field( - ..., alias="credits", describe="This is equal to `quantity` field in Stripe" + ..., + alias="credits", + description="This is equal to `quantity` field in Stripe", ) user_name: IDStr user_email: EmailStr From ac56fe4e7125e13ce88c8e5394729f9a89994df9 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 9 Oct 2024 13:52:57 +0200 Subject: [PATCH 277/280] fix mypy --- .../src/models_library/api_schemas_payments/errors.py | 4 ++-- .../payments/src/simcore_service_payments/models/db.py | 8 ++++---- .../models/schemas/acknowledgements.py | 2 +- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/packages/models-library/src/models_library/api_schemas_payments/errors.py b/packages/models-library/src/models_library/api_schemas_payments/errors.py index eaeba92aab1..362482772f7 100644 --- a/packages/models-library/src/models_library/api_schemas_payments/errors.py +++ b/packages/models-library/src/models_library/api_schemas_payments/errors.py @@ -1,7 +1,7 @@ -from pydantic.errors import PydanticErrorMixin +from common_library.errors_classes import OsparcErrorMixin -class _BaseRpcApiError(PydanticErrorMixin, ValueError): +class _BaseRpcApiError(OsparcErrorMixin, ValueError): @classmethod def get_full_class_name(cls) -> str: # Can be used as unique code identifier diff --git a/services/payments/src/simcore_service_payments/models/db.py b/services/payments/src/simcore_service_payments/models/db.py index f14ce83a234..5b97256bd08 100644 --- a/services/payments/src/simcore_service_payments/models/db.py +++ b/services/payments/src/simcore_service_payments/models/db.py @@ -50,10 +50,10 @@ class PaymentsTransactionsDB(BaseModel): from_attributes=True, json_schema_extra={ "examples": [ - _EXAMPLE_AFTER_INIT, + _EXAMPLE_AFTER_INIT, # type: ignore[list-item] # successful completion { - **_EXAMPLE_AFTER_INIT, + **_EXAMPLE_AFTER_INIT, # type: ignore[dict-item] "invoice_url": "https://my-fake-pdf-link.com", "stripe_invoice_id": "12345", "invoice_pdf_url": "https://my-fake-pdf-link.com", @@ -89,10 +89,10 @@ class 
PaymentsMethodsDB(BaseModel): from_attributes=True, json_schema_extra={ "examples": [ - _EXAMPLE_AFTER_INIT_PAYMENT_METHOD, + _EXAMPLE_AFTER_INIT_PAYMENT_METHOD, # type: ignore[list-item] # successful completion { - **_EXAMPLE_AFTER_INIT_PAYMENT_METHOD, + **_EXAMPLE_AFTER_INIT_PAYMENT_METHOD, # type: ignore[dict-item] "completed_at": "2023-09-27T10:00:15", "state": "SUCCESS", "state_message": "Payment method completed successfully", diff --git a/services/payments/src/simcore_service_payments/models/schemas/acknowledgements.py b/services/payments/src/simcore_service_payments/models/schemas/acknowledgements.py index 49f2687b420..adeda5c8a7e 100644 --- a/services/payments/src/simcore_service_payments/models/schemas/acknowledgements.py +++ b/services/payments/src/simcore_service_payments/models/schemas/acknowledgements.py @@ -90,7 +90,7 @@ class AckPayment(_BaseAckPayment): model_config = ConfigDict( json_schema_extra={ "example": _EXAMPLES[1].copy(), # shown in openapi.json - "examples": _EXAMPLES, + "examples": _EXAMPLES, # type: ignore[dict-item] } ) From c34cee902d9faaf03ffbfc7c300a4494101e1e22 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 9 Oct 2024 13:55:53 +0200 Subject: [PATCH 278/280] fix mypy --- services/payments/src/simcore_service_payments/core/errors.py | 4 ++-- .../payments/src/simcore_service_payments/services/stripe.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/services/payments/src/simcore_service_payments/core/errors.py b/services/payments/src/simcore_service_payments/core/errors.py index 5fce135e800..8b5113891fc 100644 --- a/services/payments/src/simcore_service_payments/core/errors.py +++ b/services/payments/src/simcore_service_payments/core/errors.py @@ -1,7 +1,7 @@ -from pydantic.errors import PydanticErrorMixin +from common_library.errors_classes import OsparcErrorMixin -class _BaseAppError(PydanticErrorMixin, ValueError): +class _BaseAppError(OsparcErrorMixin, ValueError): @classmethod def get_full_class_name(cls) -> str: # Can be used as unique code identifier diff --git a/services/payments/src/simcore_service_payments/services/stripe.py b/services/payments/src/simcore_service_payments/services/stripe.py index 38cc21fab0e..c0b4f18d4fe 100644 --- a/services/payments/src/simcore_service_payments/services/stripe.py +++ b/services/payments/src/simcore_service_payments/services/stripe.py @@ -34,7 +34,7 @@ def _raise_as_stripe_error(): yield except HTTPStatusError as err: - raise StripeRuntimeError from err + raise StripeRuntimeError() from err def _handle_status_errors(coro: Callable): From 93161aaaed2b3e30244a3bc8d4c0a342e76136db Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 9 Oct 2024 14:10:28 +0200 Subject: [PATCH 279/280] fix typo --- .../web/server/src/simcore_service_webserver/login/settings.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/web/server/src/simcore_service_webserver/login/settings.py b/services/web/server/src/simcore_service_webserver/login/settings.py index 6288f267257..d3db33a991c 100644 --- a/services/web/server/src/simcore_service_webserver/login/settings.py +++ b/services/web/server/src/simcore_service_webserver/login/settings.py @@ -37,7 +37,7 @@ class LoginSettings(BaseCustomSettings): LOGIN_TWILIO: TwilioSettings | None = Field( description="Twilio service settings. 
Used to send SMS for 2FA", - json_schema_extra={"auto_default_from_env=True": True} + json_schema_extra={"auto_default_from_env": True} ) LOGIN_2FA_CODE_EXPIRATION_SEC: PositiveInt = Field( From 37ee7297ad21fdf6e9632ded1417a41a129d1236 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 9 Oct 2024 14:16:13 +0200 Subject: [PATCH 280/280] fix errors --- .../core/errors.py | 36 +++++++++---------- 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/services/director-v2/src/simcore_service_director_v2/core/errors.py b/services/director-v2/src/simcore_service_director_v2/core/errors.py index e8c47a934d3..dac383a10e3 100644 --- a/services/director-v2/src/simcore_service_director_v2/core/errors.py +++ b/services/director-v2/src/simcore_service_director_v2/core/errors.py @@ -19,10 +19,10 @@ } """ +from common_library.errors_classes import OsparcErrorMixin from models_library.errors import ErrorDict from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID -from pydantic.errors import PydanticErrorMixin class DirectorError(Exception): @@ -114,15 +114,15 @@ def __init__(self, pipeline_id: str): super().__init__(f"pipeline {pipeline_id} not found") -class ComputationalRunNotFoundError(PydanticErrorMixin, DirectorError): +class ComputationalRunNotFoundError(OsparcErrorMixin, DirectorError): msg_template = "Computational run not found" -class ComputationalTaskNotFoundError(PydanticErrorMixin, DirectorError): +class ComputationalTaskNotFoundError(OsparcErrorMixin, DirectorError): msg_template = "Computational task {node_id} not found" -class WalletNotEnoughCreditsError(PydanticErrorMixin, DirectorError): +class WalletNotEnoughCreditsError(OsparcErrorMixin, DirectorError): msg_template = "Wallet '{wallet_name}' has {wallet_credit_amount} credits." 
@@ -227,38 +227,38 @@ def get_errors(self) -> list[ErrorDict]: return value_errors -class ComputationalSchedulerChangedError(PydanticErrorMixin, SchedulerError): +class ComputationalSchedulerChangedError(OsparcErrorMixin, SchedulerError): code = "computational_backend.scheduler_changed" msg_template = "The dask scheduler ID changed from '{original_scheduler_id}' to '{current_scheduler_id}'" -class ComputationalBackendNotConnectedError(PydanticErrorMixin, SchedulerError): +class ComputationalBackendNotConnectedError(OsparcErrorMixin, SchedulerError): code = "computational_backend.not_connected" msg_template = "The dask computational backend is not connected" -class ComputationalBackendNoS3AccessError(PydanticErrorMixin, SchedulerError): +class ComputationalBackendNoS3AccessError(OsparcErrorMixin, SchedulerError): msg_template = "The S3 backend is not ready, please try again later" -class ComputationalBackendTaskNotFoundError(PydanticErrorMixin, SchedulerError): +class ComputationalBackendTaskNotFoundError(OsparcErrorMixin, SchedulerError): code = "computational_backend.task_not_found" msg_template = ( "The dask computational backend does not know about the task '{job_id}'" ) -class ComputationalBackendTaskResultsNotReadyError(PydanticErrorMixin, SchedulerError): +class ComputationalBackendTaskResultsNotReadyError(OsparcErrorMixin, SchedulerError): code = "computational_backend.task_result_not_ready" msg_template = "The task result is not ready yet for job '{job_id}'" -class ClustersKeeperNotAvailableError(PydanticErrorMixin, SchedulerError): +class ClustersKeeperNotAvailableError(OsparcErrorMixin, SchedulerError): code = "computational_backend.clusters_keeper_not_available" msg_template = "clusters-keeper service is not available!" -class ComputationalBackendOnDemandNotReadyError(PydanticErrorMixin, SchedulerError): +class ComputationalBackendOnDemandNotReadyError(OsparcErrorMixin, SchedulerError): code = "computational_backend.on_demand_cluster.not_ready" msg_template = ( "The on demand computational cluster is not ready 'est. 
remaining time: {eta}'" @@ -268,16 +268,16 @@ class ComputationalBackendOnDemandNotReadyError(PydanticErrorMixin, SchedulerErr # # SCHEDULER/CLUSTER ERRORS # -class ClusterNotFoundError(PydanticErrorMixin, SchedulerError): +class ClusterNotFoundError(OsparcErrorMixin, SchedulerError): code = "cluster.not_found" msg_template = "The cluster '{cluster_id}' not found" -class ClusterAccessForbiddenError(PydanticErrorMixin, SchedulerError): +class ClusterAccessForbiddenError(OsparcErrorMixin, SchedulerError): msg_template = "Insufficient rights to access cluster '{cluster_id}'" -class ClusterInvalidOperationError(PydanticErrorMixin, SchedulerError): +class ClusterInvalidOperationError(OsparcErrorMixin, SchedulerError): msg_template = "Invalid operation on cluster '{cluster_id}'" @@ -286,24 +286,24 @@ class ClusterInvalidOperationError(PydanticErrorMixin, SchedulerError): # -class DaskClientRequestError(PydanticErrorMixin, SchedulerError): +class DaskClientRequestError(OsparcErrorMixin, SchedulerError): code = "dask_client.request.error" msg_template = ( "The dask client to cluster on '{endpoint}' did an invalid request '{error}'" ) -class DaskClusterError(PydanticErrorMixin, SchedulerError): +class DaskClusterError(OsparcErrorMixin, SchedulerError): code = "cluster.error" msg_template = "The dask cluster on '{endpoint}' encountered an error: '{error}'" -class DaskGatewayServerError(PydanticErrorMixin, SchedulerError): +class DaskGatewayServerError(OsparcErrorMixin, SchedulerError): code = "gateway.error" msg_template = "The dask gateway on '{endpoint}' encountered an error: '{error}'" -class DaskClientAcquisisitonError(PydanticErrorMixin, SchedulerError): +class DaskClientAcquisisitonError(OsparcErrorMixin, SchedulerError): code = "dask_client.acquisition.error" msg_template = ( "The dask client to cluster '{cluster}' encountered an error '{error}'"