diff --git a/packages/models-library/requirements/_base.in b/packages/models-library/requirements/_base.in
index 01da93156ecb..e2f8eae097ad 100644
--- a/packages/models-library/requirements/_base.in
+++ b/packages/models-library/requirements/_base.in
@@ -7,3 +7,5 @@ arrow
jsonschema
orjson
pydantic[email]
+pydantic-settings
+pydantic-extra-types
diff --git a/packages/models-library/requirements/_base.txt b/packages/models-library/requirements/_base.txt
index f900e7715f00..03e1b39d6777 100644
--- a/packages/models-library/requirements/_base.txt
+++ b/packages/models-library/requirements/_base.txt
@@ -1,3 +1,5 @@
+annotated-types==0.7.0
+ # via pydantic
arrow==1.3.0
# via -r requirements/_base.in
attrs==24.2.0
@@ -18,12 +20,22 @@ orjson==3.10.7
# via
# -c requirements/../../../requirements/constraints.txt
# -r requirements/_base.in
-pydantic==1.10.17
+pydantic==2.9.1
# via
# -c requirements/../../../requirements/constraints.txt
# -r requirements/_base.in
+ # pydantic-extra-types
+ # pydantic-settings
+pydantic-core==2.23.3
+ # via pydantic
+pydantic-extra-types==2.9.0
+ # via -r requirements/_base.in
+pydantic-settings==2.4.0
+ # via -r requirements/_base.in
python-dateutil==2.9.0.post0
# via arrow
+python-dotenv==1.0.1
+ # via pydantic-settings
referencing==0.35.1
# via
# jsonschema
@@ -37,4 +49,6 @@ six==1.16.0
types-python-dateutil==2.9.0.20240821
# via arrow
typing-extensions==4.12.2
- # via pydantic
+ # via
+ # pydantic
+ # pydantic-core
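A minimal sketch (not part of the patch) of why pydantic-settings becomes a direct dependency: in pydantic v2, BaseSettings lives in the separate pydantic-settings package, which in turn pulls in python-dotenv for .env support. The settings class and field below are hypothetical.

from pydantic_settings import BaseSettings, SettingsConfigDict

class ExampleSettings(BaseSettings):  # hypothetical, not a model from this repo
    model_config = SettingsConfigDict(env_file=".env")
    log_level: str = "INFO"

print(ExampleSettings().log_level)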
diff --git a/packages/models-library/requirements/_test.txt b/packages/models-library/requirements/_test.txt
index 9457e9414483..b44ab4c5a3fe 100644
--- a/packages/models-library/requirements/_test.txt
+++ b/packages/models-library/requirements/_test.txt
@@ -8,8 +8,6 @@ coverage==7.6.1
# via
# -r requirements/_test.in
# pytest-cov
-exceptiongroup==1.2.2
- # via pytest
faker==27.0.0
# via -r requirements/_test.in
flexcache==0.3
@@ -68,7 +66,9 @@ python-dateutil==2.9.0.post0
# -c requirements/_base.txt
# faker
python-dotenv==1.0.1
- # via -r requirements/_test.in
+ # via
+ # -c requirements/_base.txt
+ # -r requirements/_test.in
pyyaml==6.0.2
# via
# -c requirements/../../../requirements/constraints.txt
@@ -87,10 +87,6 @@ six==1.16.0
# python-dateutil
termcolor==2.4.0
# via pytest-sugar
-tomli==2.0.1
- # via
- # coverage
- # pytest
types-jsonschema==4.23.0.20240813
# via -r requirements/_test.in
types-pyyaml==6.0.12.20240808
diff --git a/packages/models-library/requirements/_tools.txt b/packages/models-library/requirements/_tools.txt
index 0ea376ae45aa..0efdb1139714 100644
--- a/packages/models-library/requirements/_tools.txt
+++ b/packages/models-library/requirements/_tools.txt
@@ -78,14 +78,6 @@ setuptools==73.0.1
# via pip-tools
shellingham==1.5.4
# via typer
-tomli==2.0.1
- # via
- # -c requirements/_test.txt
- # black
- # build
- # mypy
- # pip-tools
- # pylint
tomlkit==0.13.2
# via pylint
typer==0.12.4
@@ -94,8 +86,6 @@ typing-extensions==4.12.2
# via
# -c requirements/_base.txt
# -c requirements/_test.txt
- # astroid
- # black
# mypy
# typer
virtualenv==20.26.3
diff --git a/packages/models-library/scripts/validate-pg-projects.py b/packages/models-library/scripts/validate-pg-projects.py
old mode 100755
new mode 100644
index e74b438698d8..978e32cfc6f7
--- a/packages/models-library/scripts/validate-pg-projects.py
+++ b/packages/models-library/scripts/validate-pg-projects.py
@@ -4,14 +4,10 @@
import typer
from models_library.projects import ProjectAtDB
-from pydantic import Json, ValidationError, validator
-from pydantic.main import Extra
+from pydantic import ConfigDict, Json, ValidationError, field_validator
class ProjectFromCsv(ProjectAtDB):
- class Config(ProjectAtDB.Config):
- extra = Extra.forbid
-
# TODO: missing in ProjectAtDB
access_rights: Json
@@ -22,9 +18,11 @@ class Config(ProjectAtDB.Config):
hidden: bool
+ model_config = ConfigDict(extra="forbid")
+
# NOTE: validators introduced to parse CSV
- @validator("published", "hidden", pre=True, check_fields=False)
+ @field_validator("published", "hidden", mode="before", check_fields=False)
@classmethod
def empty_str_as_false(cls, v):
# See booleans for >v1.0 https://pydantic-docs.helpmanual.io/usage/types/#booleans
@@ -32,7 +30,7 @@ def empty_str_as_false(cls, v):
return False
return v
- @validator("workbench", pre=True, check_fields=False)
+ @field_validator("workbench", mode="before", check_fields=False)
@classmethod
def jsonstr_to_dict(cls, v):
if isinstance(v, str):
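As in the hunk above, v1's @validator(..., pre=True) maps to @field_validator(..., mode="before") in v2. A minimal sketch under hypothetical names:

from pydantic import BaseModel, field_validator

class CsvRow(BaseModel):  # hypothetical stand-in for ProjectFromCsv
    published: bool = False

    @field_validator("published", mode="before")  # v1: @validator(..., pre=True)
    @classmethod
    def _empty_str_as_false(cls, v):
        return False if v == "" else v

assert CsvRow(published="").published is False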
diff --git a/packages/models-library/src/models_library/access_rights.py b/packages/models-library/src/models_library/access_rights.py
index b1218b858a10..a6cea15a946c 100644
--- a/packages/models-library/src/models_library/access_rights.py
+++ b/packages/models-library/src/models_library/access_rights.py
@@ -1,4 +1,4 @@
-from pydantic import BaseModel, Extra, Field
+from pydantic import BaseModel, ConfigDict, Field
class AccessRights(BaseModel):
@@ -6,5 +6,4 @@ class AccessRights(BaseModel):
write: bool = Field(..., description="has write access")
delete: bool = Field(..., description="has deletion rights")
- class Config:
- extra = Extra.forbid
+ model_config = ConfigDict(extra="forbid")
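The pattern repeated throughout this patch: the nested class Config becomes a model_config = ConfigDict(...) attribute. A minimal sketch with a hypothetical model:

from pydantic import BaseModel, ConfigDict, ValidationError

class Strict(BaseModel):  # hypothetical
    model_config = ConfigDict(extra="forbid")  # v1: class Config: extra = Extra.forbid
    read: bool

try:
    Strict(read=True, unexpected=1)
except ValidationError as err:
    print(err.errors()[0]["type"])  # extra_forbidden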
diff --git a/packages/models-library/src/models_library/aiodocker_api.py b/packages/models-library/src/models_library/aiodocker_api.py
index 757036f31e89..c5c3b3333a0f 100644
--- a/packages/models-library/src/models_library/aiodocker_api.py
+++ b/packages/models-library/src/models_library/aiodocker_api.py
@@ -1,4 +1,4 @@
-from pydantic import Field, validator
+from pydantic import ConfigDict, Field, field_validator
from .generated_models.docker_rest_api import (
ContainerSpec,
@@ -16,7 +16,7 @@ class AioDockerContainerSpec(ContainerSpec):
        description="aiodocker expects a dictionary here and converts it back internally.\n",
)
- @validator("Env", pre=True)
+ @field_validator("Env", mode="before")
@classmethod
def convert_list_to_dict(cls, v):
if v is not None and isinstance(v, list):
@@ -37,8 +37,7 @@ class AioDockerResources1(Resources1):
None, description="Define resources reservation.", alias="Reservations"
)
- class Config(Resources1.Config): # type: ignore
- allow_population_by_field_name = True
+ model_config = ConfigDict(populate_by_name=True)
class AioDockerTaskSpec(TaskSpec):
@@ -55,6 +54,4 @@ class AioDockerTaskSpec(TaskSpec):
class AioDockerServiceSpec(ServiceSpec):
TaskTemplate: AioDockerTaskSpec | None = None
- class Config(ServiceSpec.Config): # type: ignore
- alias_generator = camel_to_snake
- allow_population_by_field_name = True
+ model_config = ConfigDict(populate_by_name=True, alias_generator=camel_to_snake)
diff --git a/packages/models-library/src/models_library/api_schemas__common/meta.py b/packages/models-library/src/models_library/api_schemas__common/meta.py
index 8cd2db53ae61..514abdc7d6df 100644
--- a/packages/models-library/src/models_library/api_schemas__common/meta.py
+++ b/packages/models-library/src/models_library/api_schemas__common/meta.py
@@ -1,6 +1,4 @@
-from typing import Any, ClassVar
-
-from pydantic import BaseModel, Field
+from pydantic import BaseModel, ConfigDict, Field
from ..basic_types import VersionStr
@@ -12,11 +10,12 @@ class BaseMeta(BaseModel):
default=None, description="Maps every route's path tag with a released version"
)
- class Config:
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"example": {
"name": "simcore_service_foo",
"version": "2.4.45",
"released": {"v1": "1.3.4", "v2": "2.4.45"},
}
}
+ )
diff --git a/packages/models-library/src/models_library/api_schemas_api_server/api_keys.py b/packages/models-library/src/models_library/api_schemas_api_server/api_keys.py
index d828fc6507d5..999cb2f192cb 100644
--- a/packages/models-library/src/models_library/api_schemas_api_server/api_keys.py
+++ b/packages/models-library/src/models_library/api_schemas_api_server/api_keys.py
@@ -1,4 +1,4 @@
-from pydantic import BaseModel, Field, SecretStr
+from pydantic import BaseModel, ConfigDict, Field, SecretStr
class ApiKey(BaseModel):
@@ -15,5 +15,4 @@ class ApiKeyInDB(BaseModel):
user_id: int
product_name: str
- class Config:
- orm_mode = True
+ model_config = ConfigDict(from_attributes=True)
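v1's orm_mode / from_orm pair becomes from_attributes / model_validate in v2. A minimal sketch with hypothetical names:

from pydantic import BaseModel, ConfigDict

class FakeRow:  # hypothetical ORM-style object
    id = 1
    display_name = "my-key"

class ApiKeyRead(BaseModel):  # hypothetical
    model_config = ConfigDict(from_attributes=True)  # v1: orm_mode = True
    id: int
    display_name: str

print(ApiKeyRead.model_validate(FakeRow()))  # v1: ApiKeyRead.from_orm(FakeRow())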
diff --git a/packages/models-library/src/models_library/api_schemas_catalog/services.py b/packages/models-library/src/models_library/api_schemas_catalog/services.py
index ab0c98c4dc54..db386a8714fc 100644
--- a/packages/models-library/src/models_library/api_schemas_catalog/services.py
+++ b/packages/models-library/src/models_library/api_schemas_catalog/services.py
@@ -1,8 +1,8 @@
from datetime import datetime
-from typing import Any, ClassVar, TypeAlias
+from typing import Any, TypeAlias
from models_library.rpc_pagination import PageRpc
-from pydantic import BaseModel, Extra, Field, HttpUrl, NonNegativeInt
+from pydantic import BaseModel, ConfigDict, Field, HttpUrl, NonNegativeInt
from ..boot_options import BootOptions
from ..emails import LowerCaseEmailStr
@@ -23,23 +23,23 @@
class ServiceUpdate(ServiceMetaDataEditable, ServiceAccessRights):
- class Config:
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"example": {
# ServiceAccessRights
"accessRights": {
1: {
"execute_access": False,
"write_access": False,
- },
+ }, # type: ignore[dict-item]
2: {
"execute_access": True,
"write_access": True,
- },
+ }, # type: ignore[dict-item]
44: {
"execute_access": False,
"write_access": False,
- },
+ }, # type: ignore[dict-item]
},
# ServiceMetaData = ServiceCommonData +
"name": "My Human Readable Service Name",
@@ -72,6 +72,7 @@ class Config:
},
}
}
+ )
_EXAMPLE_FILEPICKER: dict[str, Any] = {
@@ -206,12 +207,11 @@ class ServiceGet(
): # pylint: disable=too-many-ancestors
owner: LowerCaseEmailStr | None
- class Config:
- allow_population_by_field_name = True
- extra = Extra.ignore
- schema_extra: ClassVar[dict[str, Any]] = {
- "examples": [_EXAMPLE_FILEPICKER, _EXAMPLE_SLEEPER]
- }
+ model_config = ConfigDict(
+ extra="ignore",
+ populate_by_name=True,
+ json_schema_extra={"examples": [_EXAMPLE_FILEPICKER, _EXAMPLE_SLEEPER]},
+ )
class ServiceGetV2(BaseModel):
@@ -229,7 +229,7 @@ class ServiceGetV2(BaseModel):
service_type: ServiceType = Field(default=..., alias="type")
contact: LowerCaseEmailStr | None
- authors: list[Author] = Field(..., min_items=1)
+ authors: list[Author] = Field(..., min_length=1)
owner: LowerCaseEmailStr | None
inputs: ServiceInputsDict
@@ -249,11 +249,11 @@ class ServiceGetV2(BaseModel):
" It includes current release.",
)
- class Config:
- extra = Extra.forbid
- alias_generator = snake_to_camel
- allow_population_by_field_name = True
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ extra="forbid",
+ populate_by_name=True,
+ alias_generator=snake_to_camel,
+ json_schema_extra={
"examples": [
{
**_EXAMPLE_SLEEPER, # v2.2.1 (latest)
@@ -304,7 +304,8 @@ class Config:
],
},
]
- }
+ },
+ )
PageRpcServicesGetV2: TypeAlias = PageRpc[
@@ -330,12 +331,13 @@ class ServiceUpdateV2(BaseModel):
access_rights: dict[GroupID, ServiceGroupAccessRightsV2] | None = None
- class Config:
- extra = Extra.forbid
- alias_generator = snake_to_camel
- allow_population_by_field_name = True
+ model_config = ConfigDict(
+ extra="forbid",
+ populate_by_name=True,
+ alias_generator=snake_to_camel,
+ )
-assert set(ServiceUpdateV2.__fields__.keys()) - set( # nosec
- ServiceGetV2.__fields__.keys()
+assert set(ServiceUpdateV2.model_fields.keys()) - set( # nosec
+ ServiceGetV2.model_fields.keys()
) == {"deprecated"}
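The assertion above relies on v2's model_fields, the class-level replacement for the deprecated __fields__. A minimal sketch with a hypothetical model:

from pydantic import BaseModel

class M(BaseModel):  # hypothetical
    name: str = ""
    deprecated: bool = False

assert set(M.model_fields.keys()) == {"name", "deprecated"}  # v1: M.__fields__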
diff --git a/packages/models-library/src/models_library/api_schemas_catalog/services_ports.py b/packages/models-library/src/models_library/api_schemas_catalog/services_ports.py
index ada65d69e282..8393594b0c85 100644
--- a/packages/models-library/src/models_library/api_schemas_catalog/services_ports.py
+++ b/packages/models-library/src/models_library/api_schemas_catalog/services_ports.py
@@ -1,6 +1,6 @@
-from typing import Any, ClassVar, Literal
+from typing import Any, Literal
-from pydantic import BaseModel, Field
+from pydantic import BaseModel, ConfigDict, Field
from ..basic_regex import PUBLIC_VARIABLE_NAME_RE
from ..services import ServiceInput, ServiceOutput
@@ -17,7 +17,7 @@ class ServicePortGet(BaseModel):
key: str = Field(
...,
description="port identifier name",
- regex=PUBLIC_VARIABLE_NAME_RE,
+ pattern=PUBLIC_VARIABLE_NAME_RE,
title="Key name",
)
kind: PortKindStr
@@ -26,9 +26,8 @@ class ServicePortGet(BaseModel):
None,
description="jsonschema for the port's value. SEE https://json-schema.org/understanding-json-schema/",
)
-
- class Config:
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"example": {
"key": "input_1",
"kind": "input",
@@ -41,6 +40,7 @@ class Config:
},
}
}
+ )
@classmethod
def from_service_io(
diff --git a/packages/models-library/src/models_library/api_schemas_catalog/services_specifications.py b/packages/models-library/src/models_library/api_schemas_catalog/services_specifications.py
index aaa2b0489ae0..331ef23f83e5 100644
--- a/packages/models-library/src/models_library/api_schemas_catalog/services_specifications.py
+++ b/packages/models-library/src/models_library/api_schemas_catalog/services_specifications.py
@@ -13,9 +13,6 @@ class ServiceSpecifications(BaseModel):
        description="schedule-time specifications for the service (follows the Docker Service creation API, specifically only the Resources part; see https://docs.docker.com/engine/api/v1.41/#tag/Service/operation/ServiceCreate)",
)
- class Config:
- pass
-
class ServiceSpecificationsGet(ServiceSpecifications):
...
diff --git a/packages/models-library/src/models_library/api_schemas_directorv2/clusters.py b/packages/models-library/src/models_library/api_schemas_directorv2/clusters.py
index ca06a39b1291..1c9892a72012 100644
--- a/packages/models-library/src/models_library/api_schemas_directorv2/clusters.py
+++ b/packages/models-library/src/models_library/api_schemas_directorv2/clusters.py
@@ -1,13 +1,14 @@
-from typing import Any, ClassVar, TypeAlias
+from typing import TypeAlias
from pydantic import (
AnyHttpUrl,
BaseModel,
+ ConfigDict,
Field,
HttpUrl,
NonNegativeFloat,
- root_validator,
- validator,
+ field_validator,
+ model_validator,
)
from pydantic.networks import AnyUrl
from pydantic.types import ByteSize, PositiveFloat
@@ -44,7 +45,7 @@ class WorkerMetrics(BaseModel):
class UsedResources(DictModel[str, NonNegativeFloat]):
- @root_validator(pre=True)
+ @model_validator(mode="before")
@classmethod
def ensure_negative_value_is_zero(cls, values):
# dasks adds/remove resource values and sometimes
@@ -72,7 +73,7 @@ class Scheduler(BaseModel):
status: str = Field(..., description="The running status of the scheduler")
workers: WorkersDict | None = Field(default_factory=dict)
- @validator("workers", pre=True, always=True)
+ @field_validator("workers", mode="before")
@classmethod
def ensure_workers_is_empty_dict(cls, v):
if v is None:
@@ -95,10 +96,9 @@ class ClusterGet(Cluster):
alias="accessRights", default_factory=dict
)
- class Config(Cluster.Config):
- allow_population_by_field_name = True
+ model_config = ConfigDict(populate_by_name=True)
- @root_validator(pre=True)
+ @model_validator(mode="before")
@classmethod
def ensure_access_rights_converted(cls, values):
if "access_rights" in values:
@@ -118,21 +118,8 @@ class ClusterCreate(BaseCluster):
alias="accessRights", default_factory=dict
)
- @validator("thumbnail", always=True, pre=True)
- @classmethod
- def set_default_thumbnail_if_empty(cls, v, values):
- if v is None:
- cluster_type = values["type"]
- default_thumbnails = {
- ClusterTypeInModel.AWS.value: "https://upload.wikimedia.org/wikipedia/commons/thumb/9/93/Amazon_Web_Services_Logo.svg/250px-Amazon_Web_Services_Logo.svg.png",
- ClusterTypeInModel.ON_PREMISE.value: "https://upload.wikimedia.org/wikipedia/commons/thumb/a/ac/Crystal_Clear_app_network_local.png/120px-Crystal_Clear_app_network_local.png",
- ClusterTypeInModel.ON_DEMAND.value: "https://upload.wikimedia.org/wikipedia/commons/thumb/9/93/Amazon_Web_Services_Logo.svg/250px-Amazon_Web_Services_Logo.svg.png",
- }
- return default_thumbnails[cluster_type]
- return v
-
- class Config(BaseCluster.Config):
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"examples": [
{
"name": "My awesome cluster",
@@ -156,13 +143,27 @@ class Config(BaseCluster.Config):
"password": "somepassword",
},
"accessRights": {
- 154: CLUSTER_ADMIN_RIGHTS,
- 12: CLUSTER_MANAGER_RIGHTS,
- 7899: CLUSTER_USER_RIGHTS,
+ 154: CLUSTER_ADMIN_RIGHTS, # type: ignore[dict-item]
+ 12: CLUSTER_MANAGER_RIGHTS, # type: ignore[dict-item]
+ 7899: CLUSTER_USER_RIGHTS, # type: ignore[dict-item]
},
},
]
}
+ )
+
+ @field_validator("thumbnail", mode="before")
+ @classmethod
+ def set_default_thumbnail_if_empty(cls, v, values):
+ if v is None:
+            cluster_type = values.data["type"]  # v2 passes a ValidationInfo here, not a dict of values
+ default_thumbnails = {
+ ClusterTypeInModel.AWS.value: "https://upload.wikimedia.org/wikipedia/commons/thumb/9/93/Amazon_Web_Services_Logo.svg/250px-Amazon_Web_Services_Logo.svg.png",
+ ClusterTypeInModel.ON_PREMISE.value: "https://upload.wikimedia.org/wikipedia/commons/thumb/a/ac/Crystal_Clear_app_network_local.png/120px-Crystal_Clear_app_network_local.png",
+ ClusterTypeInModel.ON_DEMAND.value: "https://upload.wikimedia.org/wikipedia/commons/thumb/9/93/Amazon_Web_Services_Logo.svg/250px-Amazon_Web_Services_Logo.svg.png",
+ }
+ return default_thumbnails[cluster_type]
+ return v
class ClusterPatch(BaseCluster):
@@ -177,8 +178,8 @@ class ClusterPatch(BaseCluster):
alias="accessRights"
)
- class Config(BaseCluster.Config):
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"examples": [
{
"name": "Changing the name of my cluster",
@@ -188,13 +189,14 @@ class Config(BaseCluster.Config):
},
{
"accessRights": {
- 154: CLUSTER_ADMIN_RIGHTS,
- 12: CLUSTER_MANAGER_RIGHTS,
- 7899: CLUSTER_USER_RIGHTS,
+ 154: CLUSTER_ADMIN_RIGHTS, # type: ignore[dict-item]
+ 12: CLUSTER_MANAGER_RIGHTS, # type: ignore[dict-item]
+ 7899: CLUSTER_USER_RIGHTS, # type: ignore[dict-item]
},
},
]
}
+ )
class ClusterPing(BaseModel):
diff --git a/packages/models-library/src/models_library/api_schemas_directorv2/comp_tasks.py b/packages/models-library/src/models_library/api_schemas_directorv2/comp_tasks.py
index cb8dab74d65a..e383d45f20ed 100644
--- a/packages/models-library/src/models_library/api_schemas_directorv2/comp_tasks.py
+++ b/packages/models-library/src/models_library/api_schemas_directorv2/comp_tasks.py
@@ -1,7 +1,7 @@
from typing import Any, TypeAlias
from models_library.basic_types import IDStr
-from pydantic import AnyHttpUrl, AnyUrl, BaseModel, Field, validator
+from pydantic import AnyHttpUrl, AnyUrl, BaseModel, Field, field_validator
from ..clusters import ClusterID
from ..projects import ProjectID
@@ -43,13 +43,14 @@ class ComputationCreate(BaseModel):
use_on_demand_clusters: bool = Field(
default=False,
description="if True, a cluster will be created as necessary (wallet_id cannot be None, and cluster_id must be None)",
+ validate_default=True,
)
wallet_info: WalletInfo | None = Field(
default=None,
description="contains information about the wallet used to bill the running service",
)
- @validator("product_name", always=True)
+ @field_validator("product_name")
@classmethod
def ensure_product_name_defined_if_computation_starts(cls, v, values):
if "start_pipeline" in values and values["start_pipeline"] and v is None:
@@ -57,7 +58,7 @@ def ensure_product_name_defined_if_computation_starts(cls, v, values):
raise ValueError(msg)
return v
- @validator("use_on_demand_clusters", always=True)
+ @field_validator("use_on_demand_clusters")
@classmethod
def ensure_expected_options(cls, v, values):
if v is True and ("cluster_id" in values and values["cluster_id"] is not None):
diff --git a/packages/models-library/src/models_library/api_schemas_directorv2/dynamic_services.py b/packages/models-library/src/models_library/api_schemas_directorv2/dynamic_services.py
index 3515c38a5d7c..151611271a43 100644
--- a/packages/models-library/src/models_library/api_schemas_directorv2/dynamic_services.py
+++ b/packages/models-library/src/models_library/api_schemas_directorv2/dynamic_services.py
@@ -1,6 +1,6 @@
-from typing import Any, ClassVar, TypeAlias
+from typing import TypeAlias
-from pydantic import BaseModel, ByteSize, Field
+from pydantic import BaseModel, ByteSize, ConfigDict, Field
from ..resource_tracker import HardwareInfo, PricingInfo
from ..services import ServicePortKey
@@ -30,10 +30,9 @@ def from_transferred_bytes(
) -> "RetrieveDataOutEnveloped":
return cls(data=RetrieveDataOut(size_bytes=ByteSize(transferred_bytes)))
- class Config:
- schema_extra: ClassVar[dict[str, Any]] = {
- "examples": [{"data": {"size_bytes": 42}}]
- }
+ model_config = ConfigDict(
+ json_schema_extra={"examples": [{"data": {"size_bytes": 42}}]}
+ )
class DynamicServiceCreate(ServiceDetails):
@@ -55,9 +54,8 @@ class DynamicServiceCreate(ServiceDetails):
default=None,
        description="contains hardware information (ex. aws_ec2_instances)",
)
-
- class Config:
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"example": {
"key": "simcore/services/dynamic/3dviewer",
"version": "2.4.5",
@@ -67,14 +65,13 @@ class Config:
"basepath": "/x/75c7f3f4-18f9-4678-8610-54a2ade78eaa",
"product_name": "osparc",
"can_save": True,
- "service_resources": ServiceResourcesDictHelpers.Config.schema_extra[
- "examples"
- ][0],
- "wallet_info": WalletInfo.Config.schema_extra["examples"][0],
- "pricing_info": PricingInfo.Config.schema_extra["examples"][0],
- "hardware_info": HardwareInfo.Config.schema_extra["examples"][0],
+ "service_resources": ServiceResourcesDictHelpers.model_config["json_schema_extra"]["examples"][0], # type: ignore [index]
+ "wallet_info": WalletInfo.model_config["json_schema_extra"]["examples"][0], # type: ignore [index]
+ "pricing_info": PricingInfo.model_config["json_schema_extra"]["examples"][0], # type: ignore [index]
+ "hardware_info": HardwareInfo.model_config["json_schema_extra"]["examples"][0], # type: ignore [index]
}
}
+ )
DynamicServiceGet: TypeAlias = RunningDynamicServiceDetails
diff --git a/packages/models-library/src/models_library/api_schemas_directorv2/dynamic_services_service.py b/packages/models-library/src/models_library/api_schemas_directorv2/dynamic_services_service.py
index 281589614ab3..f0958695e157 100644
--- a/packages/models-library/src/models_library/api_schemas_directorv2/dynamic_services_service.py
+++ b/packages/models-library/src/models_library/api_schemas_directorv2/dynamic_services_service.py
@@ -1,8 +1,7 @@
from functools import cached_property
from pathlib import Path
-from typing import Any, ClassVar
-from pydantic import BaseModel, Field
+from pydantic import BaseModel, ConfigDict, Field
from ..basic_types import PortInt
from ..projects import ProjectID
@@ -39,10 +38,9 @@ class ServiceDetails(CommonServiceDetails):
description="predefined path where the dynamic service should be served. If empty, the service shall use the root endpoint.",
alias="service_basepath",
)
-
- class Config:
- allow_population_by_field_name = True
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ populate_by_name=True,
+ json_schema_extra={
"example": {
"key": "simcore/services/dynamic/3dviewer",
"version": "2.4.5",
@@ -51,7 +49,8 @@ class Config:
"node_uuid": "75c7f3f4-18f9-4678-8610-54a2ade78eaa",
"basepath": "/x/75c7f3f4-18f9-4678-8610-54a2ade78eaa",
}
- }
+ },
+ )
class RunningDynamicServiceDetails(ServiceDetails):
@@ -89,13 +88,9 @@ class RunningDynamicServiceDetails(ServiceDetails):
alias="service_message",
)
- @cached_property
- def legacy_service_url(self) -> str:
- return f"http://{self.host}:{self.internal_port}{self.basepath}" # NOSONAR
-
- class Config(ServiceDetails.Config):
- keep_untouched = (cached_property,)
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ ignored_types=(cached_property,),
+ json_schema_extra={
"examples": [
{
"boot_type": "V0",
@@ -125,4 +120,9 @@ class Config(ServiceDetails.Config):
"node_uuid": "75c7f3f4-18f9-4678-8610-54a2ade78eaa",
},
]
- }
+ },
+ )
+
+ @cached_property
+ def legacy_service_url(self) -> str:
+ return f"http://{self.host}:{self.internal_port}{self.basepath}" # NOSONAR
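v1's keep_untouched becomes ignored_types in v2, so functools.cached_property is left alone instead of being collected as a field. A minimal sketch with hypothetical names:

from functools import cached_property
from pydantic import BaseModel, ConfigDict

class Service(BaseModel):  # hypothetical
    model_config = ConfigDict(ignored_types=(cached_property,))  # v1: keep_untouched
    host: str = "localhost"
    internal_port: int = 8080

    @cached_property
    def legacy_url(self) -> str:
        return f"http://{self.host}:{self.internal_port}"

print(Service().legacy_url)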
diff --git a/packages/models-library/src/models_library/api_schemas_directorv2/health.py b/packages/models-library/src/models_library/api_schemas_directorv2/health.py
index 1f578888b189..827ec533418c 100644
--- a/packages/models-library/src/models_library/api_schemas_directorv2/health.py
+++ b/packages/models-library/src/models_library/api_schemas_directorv2/health.py
@@ -1,14 +1,12 @@
-from typing import Any, ClassVar
-
-from pydantic import BaseModel
+from pydantic import BaseModel, ConfigDict
class HealthCheckGet(BaseModel):
timestamp: str
-
- class Config:
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"example": {
"timestamp": "simcore_service_directorv2.api.routes.health@2023-07-03T12:59:12.024551+00:00"
}
}
+ )
diff --git a/packages/models-library/src/models_library/api_schemas_directorv2/services.py b/packages/models-library/src/models_library/api_schemas_directorv2/services.py
index 6e429be4d500..3905680b3747 100644
--- a/packages/models-library/src/models_library/api_schemas_directorv2/services.py
+++ b/packages/models-library/src/models_library/api_schemas_directorv2/services.py
@@ -1,6 +1,4 @@
-from typing import Any, ClassVar
-
-from pydantic import BaseModel, Field, validator
+from pydantic import BaseModel, ConfigDict, Field, field_validator
from pydantic.types import ByteSize, NonNegativeInt
from ..service_settings_labels import ContainerSpec
@@ -23,6 +21,7 @@ class NodeRequirements(BaseModel):
None,
description="defines the required (maximum) GPU for running the services",
alias="GPU",
+ validate_default=True
)
ram: ByteSize = Field(
...,
@@ -33,17 +32,18 @@ class NodeRequirements(BaseModel):
default=None,
description="defines the required (maximum) amount of VRAM for running the services",
alias="VRAM",
+ validate_default=True
)
- @validator("vram", "gpu", always=True, pre=True)
+ @field_validator("vram", "gpu", mode="before")
@classmethod
def check_0_is_none(cls, v):
if v == 0:
v = None
return v
- class Config:
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"examples": [
{"CPU": 1.0, "RAM": 4194304},
{"CPU": 1.0, "GPU": 1, "RAM": 4194304},
@@ -53,6 +53,7 @@ class Config:
},
]
}
+ )
class ServiceExtras(BaseModel):
@@ -60,11 +61,13 @@ class ServiceExtras(BaseModel):
service_build_details: ServiceBuildDetails | None = None
container_spec: ContainerSpec | None = None
- class Config:
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"examples": [
{"node_requirements": node_example}
- for node_example in NodeRequirements.Config.schema_extra["examples"]
+ for node_example in NodeRequirements.model_config["json_schema_extra"][
+ "examples"
+ ] # type: ignore[index,union-attr]
]
+ [
{
@@ -75,7 +78,9 @@ class Config:
"vcs_url": "git@github.com:ITISFoundation/osparc-simcore.git",
},
}
- for node_example in NodeRequirements.Config.schema_extra["examples"]
+ for node_example in NodeRequirements.model_config["json_schema_extra"][
+ "examples"
+ ] # type: ignore[index,dict-item, union-attr]
]
+ [
{
@@ -87,6 +92,9 @@ class Config:
},
"container_spec": {"Command": ["run", "subcommand"]},
}
- for node_example in NodeRequirements.Config.schema_extra["examples"]
+ for node_example in NodeRequirements.model_config["json_schema_extra"][
+ "examples"
+ ] # type: ignore[index,union-attr]
]
}
+ )
diff --git a/packages/models-library/src/models_library/api_schemas_dynamic_scheduler/dynamic_services.py b/packages/models-library/src/models_library/api_schemas_dynamic_scheduler/dynamic_services.py
index 48ef3c484455..47c4fc69a184 100644
--- a/packages/models-library/src/models_library/api_schemas_dynamic_scheduler/dynamic_services.py
+++ b/packages/models-library/src/models_library/api_schemas_dynamic_scheduler/dynamic_services.py
@@ -1,5 +1,3 @@
-from typing import Any, ClassVar
-
from models_library.api_schemas_directorv2.dynamic_services import DynamicServiceCreate
from models_library.projects import ProjectID
from models_library.projects_nodes_io import NodeID
@@ -7,7 +5,7 @@
from models_library.services_resources import ServiceResourcesDictHelpers
from models_library.users import UserID
from models_library.wallets import WalletInfo
-from pydantic import BaseModel
+from pydantic import BaseModel, ConfigDict
class DynamicServiceStart(DynamicServiceCreate):
@@ -15,8 +13,8 @@ class DynamicServiceStart(DynamicServiceCreate):
request_scheme: str
simcore_user_agent: str
- class Config:
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"example": {
"product_name": "osparc",
"can_save": True,
@@ -28,14 +26,13 @@ class Config:
"request_dns": "some.local",
"request_scheme": "http",
"simcore_user_agent": "",
- "service_resources": ServiceResourcesDictHelpers.Config.schema_extra[
- "examples"
- ][0],
- "wallet_info": WalletInfo.Config.schema_extra["examples"][0],
- "pricing_info": PricingInfo.Config.schema_extra["examples"][0],
- "hardware_info": HardwareInfo.Config.schema_extra["examples"][0],
+ "service_resources": ServiceResourcesDictHelpers.model_config["json_schema_extra"]["examples"][0], # type: ignore [index]
+ "wallet_info": WalletInfo.model_config["json_schema_extra"]["examples"][0], # type: ignore [index]
+ "pricing_info": PricingInfo.model_config["json_schema_extra"]["examples"][0], # type: ignore [index]
+ "hardware_info": HardwareInfo.model_config["json_schema_extra"]["examples"][0], # type: ignore [index]
}
}
+ )
class DynamicServiceStop(BaseModel):
@@ -45,8 +42,8 @@ class DynamicServiceStop(BaseModel):
simcore_user_agent: str
save_state: bool
- class Config:
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"example": {
"user_id": 234,
"project_id": "dd1d04d9-d704-4f7e-8f0f-1ca60cc771fe",
@@ -55,3 +52,4 @@ class Config:
"save_state": True,
}
}
+ )
diff --git a/packages/models-library/src/models_library/api_schemas_dynamic_sidecar/containers.py b/packages/models-library/src/models_library/api_schemas_dynamic_sidecar/containers.py
index 26fddb502e34..2e14ed62c162 100644
--- a/packages/models-library/src/models_library/api_schemas_dynamic_sidecar/containers.py
+++ b/packages/models-library/src/models_library/api_schemas_dynamic_sidecar/containers.py
@@ -1,18 +1,18 @@
-from typing import Any, ClassVar, TypeAlias
+from typing import TypeAlias
-from pydantic import BaseModel, NonNegativeFloat
+from pydantic import BaseModel, ConfigDict, NonNegativeFloat
class ActivityInfo(BaseModel):
seconds_inactive: NonNegativeFloat
-
- class Config:
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"examples": [
{"seconds_inactive": 0},
{"seconds_inactive": 100},
]
}
+ )
ActivityInfoOrNone: TypeAlias = ActivityInfo | None
diff --git a/packages/models-library/src/models_library/api_schemas_dynamic_sidecar/telemetry.py b/packages/models-library/src/models_library/api_schemas_dynamic_sidecar/telemetry.py
index 22d151221d82..b6fd731e9a72 100644
--- a/packages/models-library/src/models_library/api_schemas_dynamic_sidecar/telemetry.py
+++ b/packages/models-library/src/models_library/api_schemas_dynamic_sidecar/telemetry.py
@@ -34,8 +34,8 @@ class DiskUsage(BaseModel):
total: ByteSize = Field(description="total space = free + used")
used_percent: float = Field(
- gte=0.00,
- lte=100.00,
+ ge=0.00,
+ le=100.00,
description="Percent of used space relative to the total space",
)
diff --git a/packages/models-library/src/models_library/api_schemas_invitations/invitations.py b/packages/models-library/src/models_library/api_schemas_invitations/invitations.py
index e25aee7c804d..8c5fd85d2e89 100644
--- a/packages/models-library/src/models_library/api_schemas_invitations/invitations.py
+++ b/packages/models-library/src/models_library/api_schemas_invitations/invitations.py
@@ -1,6 +1,6 @@
-from typing import Any, ClassVar
+from typing import Any
-from pydantic import BaseModel, Field, HttpUrl
+from pydantic import BaseModel, ConfigDict, Field, HttpUrl
from ..invitations import InvitationContent, InvitationInputs
from ..products import ProductName
@@ -13,8 +13,7 @@
class ApiInvitationInputs(InvitationInputs):
- class Config:
- schema_extra: ClassVar[dict[str, Any]] = {"example": _INPUTS_EXAMPLE}
+ model_config = ConfigDict(json_schema_extra={"example": _INPUTS_EXAMPLE})
class ApiInvitationContent(InvitationContent):
@@ -23,26 +22,27 @@ class ApiInvitationContent(InvitationContent):
..., description="This invitations can only be used for this product."
)
- class Config:
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"example": {
**_INPUTS_EXAMPLE,
"product": "osparc",
"created": "2023-01-11 13:11:47.293595",
}
}
+ )
class ApiInvitationContentAndLink(ApiInvitationContent):
invitation_url: HttpUrl = Field(..., description="Invitation link")
-
- class Config:
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"example": {
- **ApiInvitationContent.Config.schema_extra["example"],
+ **ApiInvitationContent.model_config["json_schema_extra"]["example"], # type: ignore[index,dict-item]
"invitation_url": "https://foo.com/#/registration?invitation=1234",
}
}
+ )
class ApiEncryptedInvitation(BaseModel):
diff --git a/packages/models-library/src/models_library/api_schemas_long_running_tasks/base.py b/packages/models-library/src/models_library/api_schemas_long_running_tasks/base.py
index 1995d8c38495..a3bb93813dc0 100644
--- a/packages/models-library/src/models_library/api_schemas_long_running_tasks/base.py
+++ b/packages/models-library/src/models_library/api_schemas_long_running_tasks/base.py
@@ -1,7 +1,7 @@
import logging
-from typing import TypeAlias
+from typing import Annotated, TypeAlias
-from pydantic import BaseModel, ConstrainedFloat, Field, validate_arguments, validator
+from pydantic import BaseModel, Field, field_validator, validate_call
_logger = logging.getLogger(__name__)
@@ -9,10 +9,7 @@
ProgressMessage: TypeAlias = str
-
-class ProgressPercent(ConstrainedFloat):
- ge = 0.0
- le = 1.0
+ProgressPercent: TypeAlias = Annotated[float, Field(ge=0.0, le=1.0)]
class TaskProgress(BaseModel):
@@ -25,7 +22,7 @@ class TaskProgress(BaseModel):
message: ProgressMessage = Field(default="")
percent: ProgressPercent = Field(default=0.0)
- @validate_arguments
+ @validate_call
def update(
self,
*,
@@ -47,7 +44,7 @@ def update(
def create(cls, task_id: TaskId | None = None) -> "TaskProgress":
return cls(task_id=task_id)
- @validator("percent")
+ @field_validator("percent")
@classmethod
def round_value_to_3_digit(cls, v):
return round(v, 3)
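ConstrainedFloat subclasses are gone in v2; the hunk above replaces ProgressPercent with an Annotated alias. A minimal sketch of how such an alias is validated on its own via TypeAdapter:

from typing import Annotated
from pydantic import Field, TypeAdapter

ProgressPercent = Annotated[float, Field(ge=0.0, le=1.0)]  # v1: class ProgressPercent(ConstrainedFloat)

print(TypeAdapter(ProgressPercent).validate_python(0.5))
# TypeAdapter(ProgressPercent).validate_python(1.5) would raise a ValidationError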
diff --git a/packages/models-library/src/models_library/api_schemas_long_running_tasks/tasks.py b/packages/models-library/src/models_library/api_schemas_long_running_tasks/tasks.py
index dd0e0c0a72be..b5a8d8443b93 100644
--- a/packages/models-library/src/models_library/api_schemas_long_running_tasks/tasks.py
+++ b/packages/models-library/src/models_library/api_schemas_long_running_tasks/tasks.py
@@ -2,7 +2,7 @@
from datetime import datetime
from typing import Any
-from pydantic import BaseModel, validator
+from pydantic import BaseModel, field_validator
from .base import TaskId, TaskProgress
@@ -25,7 +25,7 @@ class TaskGet(BaseModel):
result_href: str
abort_href: str
- @validator("task_name")
+ @field_validator("task_name")
@classmethod
def unquote_str(cls, v) -> str:
return urllib.parse.unquote(v)
diff --git a/packages/models-library/src/models_library/api_schemas_resource_usage_tracker/credit_transactions.py b/packages/models-library/src/models_library/api_schemas_resource_usage_tracker/credit_transactions.py
index edb308ff39ad..c24291ebbdbb 100644
--- a/packages/models-library/src/models_library/api_schemas_resource_usage_tracker/credit_transactions.py
+++ b/packages/models-library/src/models_library/api_schemas_resource_usage_tracker/credit_transactions.py
@@ -1,7 +1,7 @@
from datetime import datetime
from decimal import Decimal
-from pydantic import BaseModel, validator
+from pydantic import BaseModel, field_validator
from ..products import ProductName
from ..resource_tracker import CreditTransactionId
@@ -13,7 +13,7 @@ class WalletTotalCredits(BaseModel):
wallet_id: WalletID
available_osparc_credits: Decimal
- @validator("available_osparc_credits", always=True)
+ @field_validator("available_osparc_credits")
@classmethod
def ensure_rounded(cls, v):
return round(v, 2)
diff --git a/packages/models-library/src/models_library/api_schemas_resource_usage_tracker/pricing_plans.py b/packages/models-library/src/models_library/api_schemas_resource_usage_tracker/pricing_plans.py
index 9c3ca4ba2b1a..bbb5d52f9066 100644
--- a/packages/models-library/src/models_library/api_schemas_resource_usage_tracker/pricing_plans.py
+++ b/packages/models-library/src/models_library/api_schemas_resource_usage_tracker/pricing_plans.py
@@ -1,8 +1,7 @@
from datetime import datetime
from decimal import Decimal
-from typing import Any, ClassVar
-from pydantic import BaseModel
+from pydantic import BaseModel, ConfigDict
from ..resource_tracker import (
HardwareInfo,
@@ -24,21 +23,24 @@ class PricingUnitGet(BaseModel):
default: bool
specific_info: HardwareInfo
- class Config:
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"examples": [
{
"pricing_unit_id": 1,
"unit_name": "SMALL",
- "unit_extra_info": UnitExtraInfo.Config.schema_extra["examples"][0],
+ "unit_extra_info": UnitExtraInfo.model_config["json_schema_extra"]["examples"][0], # type: ignore [index]
"current_cost_per_unit": 5.7,
"current_cost_per_unit_id": 1,
"default": True,
"specific_info": hw_config_example,
}
- for hw_config_example in HardwareInfo.Config.schema_extra["examples"]
+ for hw_config_example in HardwareInfo.model_config["json_schema_extra"][
+ "examples"
+ ] # type: ignore[index,union-attr]
]
}
+ )
class PricingPlanGet(BaseModel):
@@ -51,8 +53,8 @@ class PricingPlanGet(BaseModel):
pricing_units: list[PricingUnitGet] | None
is_active: bool
- class Config:
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"examples": [
{
"pricing_plan_id": 1,
@@ -64,11 +66,14 @@ class Config:
"pricing_units": [pricing_unit_get_example],
"is_active": True,
}
- for pricing_unit_get_example in PricingUnitGet.Config.schema_extra[
+ for pricing_unit_get_example in PricingUnitGet.model_config[
+ "json_schema_extra"
+ ][
"examples"
- ]
+ ] # type: ignore[index,union-attr]
]
}
+ )
class PricingPlanToServiceGet(BaseModel):
@@ -77,8 +82,8 @@ class PricingPlanToServiceGet(BaseModel):
service_version: ServiceVersion
created: datetime
- class Config:
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"examples": [
{
"pricing_plan_id": 1,
@@ -88,3 +93,4 @@ class Config:
}
]
}
+ )
diff --git a/packages/models-library/src/models_library/api_schemas_storage.py b/packages/models-library/src/models_library/api_schemas_storage.py
index 29e341456bce..fac13d42d401 100644
--- a/packages/models-library/src/models_library/api_schemas_storage.py
+++ b/packages/models-library/src/models_library/api_schemas_storage.py
@@ -6,22 +6,21 @@
IMPORTANT: DO NOT COUPLE these schemas until storage is refactored
"""
-import re
from datetime import datetime
from enum import Enum
-from re import Pattern
-from typing import Any, ClassVar, TypeAlias
+from typing import Annotated, Any, TypeAlias
from uuid import UUID
from pydantic import (
BaseModel,
ByteSize,
- ConstrainedStr,
- Extra,
+ ConfigDict,
Field,
PositiveInt,
- root_validator,
- validator,
+ RootModel,
+ StringConstraints,
+ field_validator,
+ model_validator,
)
from pydantic.networks import AnyUrl
@@ -39,12 +38,9 @@
ETag: TypeAlias = str
-class S3BucketName(ConstrainedStr):
- regex: Pattern[str] | None = re.compile(S3_BUCKET_NAME_RE)
+S3BucketName: TypeAlias = Annotated[str, StringConstraints(pattern=S3_BUCKET_NAME_RE)]
-
-class DatCoreDatasetName(ConstrainedStr):
- regex: Pattern[str] | None = re.compile(DATCORE_DATASET_NAME_RE)
+DatCoreDatasetName: TypeAlias = Annotated[str, StringConstraints(pattern=DATCORE_DATASET_NAME_RE)]
# /
@@ -60,14 +56,15 @@ class FileLocation(BaseModel):
name: LocationName
id: LocationID
- class Config:
- extra = Extra.forbid
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ extra="forbid",
+ json_schema_extra={
"examples": [
{"name": "simcore.s3", "id": 0},
{"name": "datcore", "id": 1},
]
- }
+ },
+ )
FileLocationArray: TypeAlias = ListModel[FileLocation]
@@ -77,11 +74,10 @@ class Config:
class DatasetMetaDataGet(BaseModel):
dataset_id: UUID | DatCoreDatasetName
display_name: str
-
- class Config:
- extra = Extra.forbid
- orm_mode = True
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ extra="forbid",
+ from_attributes=True,
+ json_schema_extra={
"examples": [
# simcore dataset
{
@@ -106,7 +102,8 @@ class Config:
"display_name": "YetAnotherTest",
},
]
- }
+ },
+ )
# /locations/{location_id}/files/metadata:
@@ -150,17 +147,10 @@ class FileMetaDataGet(BaseModel):
description="SHA256 message digest of the file content. Main purpose: cheap lookup.",
)
- @validator("location_id", pre=True)
- @classmethod
- def ensure_location_is_integer(cls, v):
- if v is not None:
- return int(v)
- return v
-
- class Config:
- extra = Extra.forbid
- orm_mode = True
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ extra="forbid",
+ from_attributes=True,
+ json_schema_extra={
"examples": [
# typical S3 entry
{
@@ -234,11 +224,19 @@ class Config:
"project_name": None,
},
]
- }
+ },
+ )
+
+ @field_validator("location_id", mode="before")
+ @classmethod
+ def ensure_location_is_integer(cls, v):
+ if v is not None:
+ return int(v)
+ return v
-class FileMetaDataArray(BaseModel):
- __root__: list[FileMetaDataGet] = []
+class FileMetaDataArray(RootModel[list[FileMetaDataGet]]):
+ root: list[FileMetaDataGet] = Field(default_factory=list)
# /locations/{location_id}/files/{file_id}
@@ -279,7 +277,7 @@ class UploadedPart(BaseModel):
class FileUploadCompletionBody(BaseModel):
parts: list[UploadedPart]
- @validator("parts")
+ @field_validator("parts")
@classmethod
def ensure_sorted(cls, value: list[UploadedPart]) -> list[UploadedPart]:
return sorted(value, key=lambda uploaded_part: uploaded_part.number)
@@ -312,7 +310,7 @@ class FoldersBody(BaseModel):
destination: dict[str, Any] = Field(default_factory=dict)
nodes_map: dict[NodeID, NodeID] = Field(default_factory=dict)
- @root_validator()
+ @model_validator(mode="after")
@classmethod
def ensure_consistent_entries(cls, values):
source_node_keys = (NodeID(n) for n in values["source"].get("workbench", {}))
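Two more v2 idioms from the file above: ConstrainedStr subclasses become Annotated[str, StringConstraints(...)], and __root__ models become RootModel. A minimal sketch; the pattern below is hypothetical, not the repository's S3_BUCKET_NAME_RE:

from typing import Annotated
from pydantic import Field, RootModel, StringConstraints, TypeAdapter

BucketName = Annotated[str, StringConstraints(pattern=r"^[a-z0-9.-]{3,63}$")]  # hypothetical pattern
print(TypeAdapter(BucketName).validate_python("my-bucket"))

class FileList(RootModel[list[str]]):  # v1: __root__: list[str] = []
    root: list[str] = Field(default_factory=list)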
diff --git a/packages/models-library/src/models_library/api_schemas_webserver/_base.py b/packages/models-library/src/models_library/api_schemas_webserver/_base.py
index 9856a4743e9a..718984116c7e 100644
--- a/packages/models-library/src/models_library/api_schemas_webserver/_base.py
+++ b/packages/models-library/src/models_library/api_schemas_webserver/_base.py
@@ -5,38 +5,36 @@
from typing import Any
-from pydantic import BaseModel, Extra
+from pydantic import BaseModel, ConfigDict
from ..utils.change_case import snake_to_camel
class EmptyModel(BaseModel):
- # Used to represent body={}
- class Config:
- extra = Extra.forbid
+ model_config = ConfigDict(extra="forbid")
class InputSchemaWithoutCamelCase(BaseModel):
- # Added to tmp keep backwards compatibility
- # until all bodies are updated
- #
- class Config:
- allow_population_by_field_name = False
- extra = Extra.ignore # Non-strict inputs policy: Used to prune extra field
- allow_mutations = False
+ model_config = ConfigDict(
+ populate_by_name=False,
+ extra="ignore", # Non-strict inputs policy: Used to prune extra field
+ frozen=True,
+ )
class InputSchema(BaseModel):
- class Config(InputSchemaWithoutCamelCase.Config):
- alias_generator = snake_to_camel
+ model_config = ConfigDict(
+ **InputSchemaWithoutCamelCase.model_config, alias_generator=snake_to_camel
+ )
class OutputSchema(BaseModel):
- class Config:
- allow_population_by_field_name = True
- extra = Extra.ignore # Used to prune extra fields from internal data
- allow_mutations = False
- alias_generator = snake_to_camel
+ model_config = ConfigDict(
+ alias_generator=snake_to_camel,
+ populate_by_name=True,
+ extra="ignore", # Used to prune extra fields from internal data
+ frozen=True,
+ )
def data(
self,
@@ -47,7 +45,7 @@ def data(
**kwargs
) -> dict[str, Any]:
"""Helper function to get envelope's data as a dict"""
- return self.dict(
+ return self.model_dump(
by_alias=True,
exclude_unset=exclude_unset,
exclude_defaults=exclude_defaults,
@@ -64,7 +62,7 @@ def data_json(
**kwargs
) -> str:
"""Helper function to get envelope's data as a json str"""
- return self.json(
+ return self.model_dump_json(
by_alias=True,
exclude_unset=exclude_unset,
exclude_defaults=exclude_defaults,
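The helpers above switch from v1's .dict()/.json() to v2's model_dump()/model_dump_json(), which accept the same alias/exclude keyword arguments. A minimal sketch with a hypothetical model:

from pydantic import BaseModel

class Out(BaseModel):  # hypothetical
    name: str = "demo"

print(Out().model_dump(by_alias=True, exclude_unset=False))  # v1: .dict(...)
print(Out().model_dump_json(exclude_none=True))              # v1: .json(...)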
diff --git a/packages/models-library/src/models_library/api_schemas_webserver/auth.py b/packages/models-library/src/models_library/api_schemas_webserver/auth.py
index d9f2754171d0..b0b11661cb36 100644
--- a/packages/models-library/src/models_library/api_schemas_webserver/auth.py
+++ b/packages/models-library/src/models_library/api_schemas_webserver/auth.py
@@ -1,7 +1,7 @@
from datetime import timedelta
-from typing import Any, ClassVar
+from typing import Any
-from pydantic import BaseModel, Field, SecretStr
+from pydantic import BaseModel, ConfigDict, Field, SecretStr
from ..emails import LowerCaseEmailStr
from ._base import InputSchema
@@ -11,11 +11,13 @@ class AccountRequestInfo(InputSchema):
form: dict[str, Any]
captcha: str
- class Config:
+ model_config = ConfigDict(
+ str_strip_whitespace=True,
+ str_max_length=200,
# NOTE: this is just informative. The format of the form is defined
# currently in the front-end and it might change
# SEE image in https://github.com/ITISFoundation/osparc-simcore/pull/5378
- schema_extra: ClassVar[dict[str, Any]] = {
+ json_schema_extra={
"example": {
"form": {
"firstName": "James",
@@ -35,9 +37,8 @@ class Config:
},
"captcha": "A12B34",
}
- }
- anystr_strip_whitespace = True
- max_anystr_length = 200
+ },
+ )
class UnregisterCheck(InputSchema):
@@ -57,8 +58,8 @@ class ApiKeyCreate(BaseModel):
description="Time delta from creation time to expiration. If None, then it does not expire.",
)
- class Config:
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"examples": [
{
"display_name": "test-api-forever",
@@ -73,6 +74,7 @@ class Config:
},
]
}
+ )
class ApiKeyGet(BaseModel):
@@ -80,9 +82,10 @@ class ApiKeyGet(BaseModel):
api_key: str
api_secret: str
- class Config:
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"examples": [
{"display_name": "myapi", "api_key": "key", "api_secret": "secret"},
]
}
+ )
diff --git a/packages/models-library/src/models_library/api_schemas_webserver/catalog.py b/packages/models-library/src/models_library/api_schemas_webserver/catalog.py
index 172575a8f929..09bfa36499ad 100644
--- a/packages/models-library/src/models_library/api_schemas_webserver/catalog.py
+++ b/packages/models-library/src/models_library/api_schemas_webserver/catalog.py
@@ -1,13 +1,12 @@
-from typing import Any, ClassVar, TypeAlias
+from typing import Any, TypeAlias
-from pydantic import Extra, Field
+from pydantic import ConfigDict, Field
from pydantic.main import BaseModel
from ..api_schemas_catalog import services as api_schemas_catalog_services
from ..services_io import ServiceInput, ServiceOutput
from ..services_types import ServicePortKey
from ..utils.change_case import snake_to_camel
-from ..utils.json_serialization import json_dumps, json_loads
from ._base import InputSchema, OutputSchema
ServiceInputKey: TypeAlias = ServicePortKey
@@ -24,12 +23,9 @@ class _BaseCommonApiExtension(BaseModel):
description="Short name for the unit for display (html-compatible), if available",
)
- class Config:
- alias_generator = snake_to_camel
- allow_population_by_field_name = True
- extra = Extra.forbid
- json_dumps = json_dumps
- json_loads = json_loads
+ model_config = ConfigDict(
+ alias_generator=snake_to_camel, populate_by_name=True, extra="forbid"
+ )
class ServiceInputGet(ServiceInput, _BaseCommonApiExtension):
@@ -39,8 +35,8 @@ class ServiceInputGet(ServiceInput, _BaseCommonApiExtension):
..., description="Unique name identifier for this input"
)
- class Config(_BaseCommonApiExtension.Config):
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"example": {
"displayOrder": 2,
"label": "Sleep Time",
@@ -70,6 +66,7 @@ class Config(_BaseCommonApiExtension.Config):
}
],
}
+ )
class ServiceOutputGet(ServiceOutput, _BaseCommonApiExtension):
@@ -79,8 +76,8 @@ class ServiceOutputGet(ServiceOutput, _BaseCommonApiExtension):
..., description="Unique name identifier for this input"
)
- class Config(_BaseCommonApiExtension.Config):
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"example": {
"displayOrder": 2,
"label": "Time Slept",
@@ -92,6 +89,7 @@ class Config(_BaseCommonApiExtension.Config):
"keyId": "output_2",
}
}
+ )
ServiceInputsGetDict: TypeAlias = dict[ServicePortKey, ServiceInputGet]
@@ -99,7 +97,7 @@ class Config(_BaseCommonApiExtension.Config):
_EXAMPLE_FILEPICKER: dict[str, Any] = {
- **api_schemas_catalog_services.ServiceGet.Config.schema_extra["examples"][1],
+ **api_schemas_catalog_services.ServiceGet.model_config["json_schema_extra"]["examples"][1], # type: ignore [index,dict-item]
"inputs": {},
"outputs": {
"outFile": {
@@ -114,7 +112,7 @@ class Config(_BaseCommonApiExtension.Config):
}
_EXAMPLE_SLEEPER: dict[str, Any] = {
- **api_schemas_catalog_services.ServiceGet.Config.schema_extra["examples"][0],
+ **api_schemas_catalog_services.ServiceGet.model_config["json_schema_extra"]["examples"][0], # type: ignore[index,dict-item]
"inputs": {
"input_1": {
"displayOrder": 1,
@@ -224,15 +222,14 @@ class ServiceGet(api_schemas_catalog_services.ServiceGet):
..., description="outputs with extended information"
)
- class Config(OutputSchema.Config):
- schema_extra: ClassVar[dict[str, Any]] = {
- "examples": [_EXAMPLE_FILEPICKER, _EXAMPLE_SLEEPER]
- }
+ model_config = ConfigDict(
+ **OutputSchema.model_config,
+ json_schema_extra={"examples": [_EXAMPLE_FILEPICKER, _EXAMPLE_SLEEPER]},
+ )
class ServiceResourcesGet(api_schemas_catalog_services.ServiceResourcesGet):
- class Config(OutputSchema.Config):
- ...
+ model_config = OutputSchema.model_config
class CatalogServiceGet(api_schemas_catalog_services.ServiceGetV2):
@@ -246,23 +243,26 @@ class CatalogServiceGet(api_schemas_catalog_services.ServiceGetV2):
..., description="outputs with extended information"
)
- class Config(OutputSchema.Config):
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ **OutputSchema.model_config,
+ json_schema_extra={
"example": {
- **api_schemas_catalog_services.ServiceGetV2.Config.schema_extra[
- "examples"
- ][0],
+ **api_schemas_catalog_services.ServiceGetV2.model_config["json_schema_extra"]["examples"][0], # type: ignore [index,dict-item]
"inputs": {
f"input{i}": example
for i, example in enumerate(
- ServiceInputGet.Config.schema_extra["examples"]
+ ServiceInputGet.model_config["json_schema_extra"]["examples"] # type: ignore[index,arg-type]
)
},
- "outputs": {"outFile": ServiceOutputGet.Config.schema_extra["example"]},
+ "outputs": {
+ "outFile": ServiceOutputGet.model_config["json_schema_extra"][
+ "example"
+ ] # type: ignore[index]
+ },
}
- }
+ },
+ )
class CatalogServiceUpdate(api_schemas_catalog_services.ServiceUpdateV2):
- class Config(InputSchema.Config):
- ...
+ model_config = InputSchema.model_config
diff --git a/packages/models-library/src/models_library/api_schemas_webserver/clusters.py b/packages/models-library/src/models_library/api_schemas_webserver/clusters.py
index b1b897462389..109e0618b98d 100644
--- a/packages/models-library/src/models_library/api_schemas_webserver/clusters.py
+++ b/packages/models-library/src/models_library/api_schemas_webserver/clusters.py
@@ -1,4 +1,4 @@
-from pydantic import BaseModel, Extra
+from pydantic import BaseModel, ConfigDict
from ..api_schemas_directorv2 import clusters as directorv2_clusters
from ..clusters import ClusterID
@@ -7,32 +7,27 @@
class ClusterPathParams(BaseModel):
cluster_id: ClusterID
-
- class Config:
- allow_population_by_field_name = True
- extra = Extra.forbid
+ model_config = ConfigDict(
+ populate_by_name=True,
+ extra="forbid",
+ )
class ClusterGet(directorv2_clusters.ClusterGet):
- class Config(OutputSchema.Config):
- ...
+ model_config = OutputSchema.model_config
class ClusterCreate(directorv2_clusters.ClusterCreate):
- class Config(InputSchema.Config):
- ...
+ model_config = InputSchema.model_config
class ClusterPatch(directorv2_clusters.ClusterPatch):
- class Config(InputSchema.Config):
- ...
+ model_config = InputSchema.model_config
class ClusterPing(directorv2_clusters.ClusterPing):
- class Config(InputSchema.Config):
- ...
+ model_config = InputSchema.model_config
class ClusterDetails(directorv2_clusters.ClusterDetails):
- class Config(OutputSchema.Config):
- ...
+ model_config = OutputSchema.model_config
diff --git a/packages/models-library/src/models_library/api_schemas_webserver/folders.py b/packages/models-library/src/models_library/api_schemas_webserver/folders.py
index e971b1f8c73c..48a2ae605e41 100644
--- a/packages/models-library/src/models_library/api_schemas_webserver/folders.py
+++ b/packages/models-library/src/models_library/api_schemas_webserver/folders.py
@@ -6,7 +6,7 @@
from models_library.projects_access import AccessRights
from models_library.users import GroupID
from models_library.utils.common_validators import null_or_none_str_to_none_validator
-from pydantic import Extra, PositiveInt, validator
+from pydantic import ConfigDict, PositiveInt, field_validator
from ._base import InputSchema, OutputSchema
@@ -33,11 +33,10 @@ class CreateFolderBodyParams(InputSchema):
description: str
parent_folder_id: FolderID | None = None
- class Config:
- extra = Extra.forbid
+ model_config = ConfigDict(extra="forbid")
- _null_or_none_str_to_none_validator = validator(
- "parent_folder_id", allow_reuse=True, pre=True
+ _null_or_none_str_to_none_validator = field_validator(
+ "parent_folder_id", mode="before"
)(null_or_none_str_to_none_validator)
@@ -45,5 +44,4 @@ class PutFolderBodyParams(InputSchema):
name: IDStr
description: str
- class Config:
- extra = Extra.forbid
+ model_config = ConfigDict(extra="forbid")
diff --git a/packages/models-library/src/models_library/api_schemas_webserver/folders_v2.py b/packages/models-library/src/models_library/api_schemas_webserver/folders_v2.py
index e2d4918c4355..29fed6baced1 100644
--- a/packages/models-library/src/models_library/api_schemas_webserver/folders_v2.py
+++ b/packages/models-library/src/models_library/api_schemas_webserver/folders_v2.py
@@ -7,7 +7,7 @@
from models_library.users import GroupID
from models_library.utils.common_validators import null_or_none_str_to_none_validator
from models_library.workspaces import WorkspaceID
-from pydantic import Extra, PositiveInt, validator
+from pydantic import ConfigDict, PositiveInt, field_validator
from ._base import InputSchema, OutputSchema
@@ -32,26 +32,22 @@ class CreateFolderBodyParams(InputSchema):
name: IDStr
parent_folder_id: FolderID | None = None
workspace_id: WorkspaceID | None = None
+ model_config = ConfigDict(extra="forbid")
- class Config:
- extra = Extra.forbid
-
- _null_or_none_str_to_none_validator = validator(
- "parent_folder_id", allow_reuse=True, pre=True
+ _null_or_none_str_to_none_validator = field_validator(
+ "parent_folder_id", mode="before"
)(null_or_none_str_to_none_validator)
- _null_or_none_str_to_none_validator2 = validator(
- "workspace_id", allow_reuse=True, pre=True
+ _null_or_none_str_to_none_validator2 = field_validator(
+ "workspace_id", mode="before"
)(null_or_none_str_to_none_validator)
class PutFolderBodyParams(InputSchema):
name: IDStr
parent_folder_id: FolderID | None
+ model_config = ConfigDict(extra="forbid")
- class Config:
- extra = Extra.forbid
-
- _null_or_none_str_to_none_validator = validator(
- "parent_folder_id", allow_reuse=True, pre=True
+ _null_or_none_str_to_none_validator = field_validator(
+ "parent_folder_id", mode="before"
)(null_or_none_str_to_none_validator)
diff --git a/packages/models-library/src/models_library/api_schemas_webserver/groups.py b/packages/models-library/src/models_library/api_schemas_webserver/groups.py
index e0b6d3fbb37c..46e9da3dc525 100644
--- a/packages/models-library/src/models_library/api_schemas_webserver/groups.py
+++ b/packages/models-library/src/models_library/api_schemas_webserver/groups.py
@@ -1,7 +1,14 @@
from contextlib import suppress
-from typing import Any, ClassVar
-from pydantic import AnyUrl, BaseModel, Field, ValidationError, parse_obj_as, validator
+from pydantic import (
+ AnyUrl,
+ BaseModel,
+ ConfigDict,
+ Field,
+ TypeAdapter,
+ ValidationError,
+ field_validator,
+)
from ..emails import LowerCaseEmailStr
@@ -18,15 +25,15 @@ class GroupAccessRights(BaseModel):
read: bool
write: bool
delete: bool
-
- class Config:
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"examples": [
{"read": True, "write": False, "delete": False},
{"read": True, "write": True, "delete": False},
{"read": True, "write": True, "delete": True},
]
}
+ )
class UsersGroup(BaseModel):
@@ -43,17 +50,8 @@ class UsersGroup(BaseModel):
alias="inclusionRules",
)
- @validator("thumbnail", pre=True)
- @classmethod
- def sanitize_legacy_data(cls, v):
- if v:
- # Enforces null if thumbnail is not valid URL or empty
- with suppress(ValidationError):
- return parse_obj_as(AnyUrl, v)
- return None
-
- class Config:
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"examples": [
{
"gid": "27",
@@ -84,6 +82,16 @@ class Config:
},
]
}
+ )
+
+ @field_validator("thumbnail", mode="before")
+ @classmethod
+ def _sanitize_legacy_data(cls, v):
+ if v:
+ # Enforces null if thumbnail is not a valid URL or is empty
+ with suppress(ValidationError):
+ return TypeAdapter(AnyUrl).validate_python(v)
+ return None
class AllUsersGroups(BaseModel):
@@ -92,8 +100,8 @@ class AllUsersGroups(BaseModel):
all: UsersGroup | None = None
product: UsersGroup | None = None
- class Config:
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"example": {
"me": {
"gid": "27",
@@ -131,6 +139,7 @@ class Config:
},
}
}
+ )
class GroupUserGet(BaseModel):
@@ -142,8 +151,8 @@ class GroupUserGet(BaseModel):
gid: str | None = Field(None, description="the user primary gid")
access_rights: GroupAccessRights = Field(..., alias="accessRights")
- class Config:
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"example": {
"id": "1",
"login": "mr.smith@matrix.com",
@@ -158,3 +167,4 @@ class Config:
},
}
}
+ )
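A short sketch of the two replacements used above, with a hypothetical model: `schema_extra` under `class Config` becomes `json_schema_extra` inside `ConfigDict`, and `parse_obj_as(AnyUrl, v)` becomes `TypeAdapter(AnyUrl).validate_python(v)`.

# Illustrative sketch only; `Card` is not the real UsersGroup model
from contextlib import suppress
from pydantic import AnyUrl, BaseModel, ConfigDict, TypeAdapter, ValidationError, field_validator


class Card(BaseModel):
    model_config = ConfigDict(
        json_schema_extra={"examples": [{"thumbnail": "https://example.com/a.png"}]}
    )

    thumbnail: AnyUrl | None = None

    @field_validator("thumbnail", mode="before")
    @classmethod
    def _sanitize(cls, v):
        # enforce None when the value is empty or not a valid URL
        if v:
            with suppress(ValidationError):
                return TypeAdapter(AnyUrl).validate_python(v)
        return None


assert Card(thumbnail="not-a-url").thumbnail is None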
diff --git a/packages/models-library/src/models_library/api_schemas_webserver/product.py b/packages/models-library/src/models_library/api_schemas_webserver/product.py
index da0db6032022..f967e15d548e 100644
--- a/packages/models-library/src/models_library/api_schemas_webserver/product.py
+++ b/packages/models-library/src/models_library/api_schemas_webserver/product.py
@@ -1,7 +1,7 @@
from datetime import datetime
-from typing import Any, ClassVar
+from typing import Annotated, TypeAlias
-from pydantic import ConstrainedInt, Field, HttpUrl, NonNegativeInt, PositiveInt
+from pydantic import ConfigDict, Field, HttpUrl, NonNegativeInt, PositiveInt
from ..basic_types import IDStr, NonNegativeDecimal
from ..emails import LowerCaseEmailStr
@@ -22,8 +22,8 @@ class GetCreditPrice(OutputSchema):
"Can be None if this product's price is UNDEFINED",
)
- class Config(OutputSchema.Config):
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"examples": [
{
"productName": "osparc",
@@ -37,6 +37,7 @@ class Config(OutputSchema.Config):
},
]
}
+ )
class GetProductTemplate(OutputSchema):
@@ -75,9 +76,7 @@ class GetProduct(OutputSchema):
)
-class ExtraCreditsUsdRangeInt(ConstrainedInt):
- ge = 0
- lt = 500
+ExtraCreditsUsdRangeInt: TypeAlias = Annotated[int, Field(ge=0, lt=500)]
class GenerateInvitation(InputSchema):
@@ -95,8 +94,8 @@ class InvitationGenerated(OutputSchema):
created: datetime
invitation_link: HttpUrl
- class Config(OutputSchema.Config):
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"examples": [
{
"productName": "osparc",
@@ -117,3 +116,4 @@ class Config(OutputSchema.Config):
},
]
}
+ )
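The `ConstrainedInt` subclass above becomes an `Annotated` alias; a minimal usage sketch of how such an alias validates via `TypeAdapter`:

from typing import Annotated, TypeAlias
from pydantic import Field, TypeAdapter, ValidationError

ExtraCreditsUsdRangeInt: TypeAlias = Annotated[int, Field(ge=0, lt=500)]

adapter = TypeAdapter(ExtraCreditsUsdRangeInt)
assert adapter.validate_python(499) == 499
try:
    adapter.validate_python(500)
except ValidationError:
    pass  # out of range, rejected as expected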
diff --git a/packages/models-library/src/models_library/api_schemas_webserver/projects.py b/packages/models-library/src/models_library/api_schemas_webserver/projects.py
index b644ac52d0b4..dd1625301fa9 100644
--- a/packages/models-library/src/models_library/api_schemas_webserver/projects.py
+++ b/packages/models-library/src/models_library/api_schemas_webserver/projects.py
@@ -9,14 +9,10 @@
from models_library.folders import FolderID
from models_library.workspaces import WorkspaceID
-from pydantic import Field, validator
+from pydantic import Field, HttpUrl, field_validator
from ..api_schemas_long_running_tasks.tasks import TaskGet
-from ..basic_types import (
- HttpUrlWithCustomMinLength,
- LongTruncatedStr,
- ShortTruncatedStr,
-)
+from ..basic_types import LongTruncatedStr, ShortTruncatedStr
from ..emails import LowerCaseEmailStr
from ..projects import ClassifierID, DateTimeStr, NodesDict, ProjectID
from ..projects_access import AccessRights, GroupIDStr
@@ -36,7 +32,7 @@ class ProjectCreateNew(InputSchema):
uuid: ProjectID | None = None # NOTE: suggested uuid! but could be different!
name: str
description: str | None
- thumbnail: HttpUrlWithCustomMinLength | None
+ thumbnail: HttpUrl | None
workbench: NodesDict
access_rights: dict[GroupIDStr, AccessRights]
tags: list[int] = Field(default_factory=list)
@@ -45,23 +41,23 @@ class ProjectCreateNew(InputSchema):
workspace_id: WorkspaceID | None = None
folder_id: FolderID | None = None
- _empty_is_none = validator(
- "uuid", "thumbnail", "description", allow_reuse=True, pre=True
- )(empty_str_to_none_pre_validator)
+ _empty_is_none = field_validator("uuid", "thumbnail", "description", mode="before")(
+ empty_str_to_none_pre_validator
+ )
- _null_or_none_to_none = validator(
- "workspace_id", "folder_id", allow_reuse=True, pre=True
- )(null_or_none_str_to_none_validator)
+ _null_or_none_to_none = field_validator("workspace_id", "folder_id", mode="before")(
+ null_or_none_str_to_none_validator
+ )
# NOTE: based on OVERRIDABLE_DOCUMENT_KEYS
class ProjectCopyOverride(InputSchema):
name: str
description: str | None
- thumbnail: HttpUrlWithCustomMinLength | None
+ thumbnail: HttpUrl | None
prj_owner: LowerCaseEmailStr
- _empty_is_none = validator("thumbnail", allow_reuse=True, pre=True)(
+ _empty_is_none = field_validator("thumbnail", mode="before")(
empty_str_to_none_pre_validator
)
@@ -70,7 +66,7 @@ class ProjectGet(OutputSchema):
uuid: ProjectID
name: str
description: str
- thumbnail: HttpUrlWithCustomMinLength | Literal[""]
+ thumbnail: HttpUrl | Literal[""]
creation_date: DateTimeStr
last_change_date: DateTimeStr
workbench: NodesDict
@@ -78,14 +74,14 @@ class ProjectGet(OutputSchema):
access_rights: dict[GroupIDStr, AccessRights]
tags: list[int]
classifiers: list[ClassifierID] = []
- state: ProjectState | None
- ui: EmptyModel | StudyUI | None
+ state: ProjectState | None = None
+ ui: EmptyModel | StudyUI | None = None
quality: dict[str, Any] = {}
- dev: dict | None
+ dev: dict | None = None
permalink: ProjectPermalink = FieldNotRequired()
- workspace_id: WorkspaceID | None
+ workspace_id: WorkspaceID | None = None
- _empty_description = validator("description", allow_reuse=True, pre=True)(
+ _empty_description = field_validator("description", mode="before")(
none_to_empty_str_pre_validator
)
@@ -101,7 +97,7 @@ class ProjectReplace(InputSchema):
uuid: ProjectID
name: ShortTruncatedStr
description: LongTruncatedStr
- thumbnail: HttpUrlWithCustomMinLength | None
+ thumbnail: HttpUrl | None
creation_date: DateTimeStr
last_change_date: DateTimeStr
workbench: NodesDict
@@ -115,7 +111,7 @@ class ProjectReplace(InputSchema):
default_factory=dict,
)
- _empty_is_none = validator("thumbnail", allow_reuse=True, pre=True)(
+ _empty_is_none = field_validator("thumbnail", mode="before")(
empty_str_to_none_pre_validator
)
@@ -123,7 +119,7 @@ class ProjectReplace(InputSchema):
class ProjectUpdate(InputSchema):
name: ShortTruncatedStr = FieldNotRequired()
description: LongTruncatedStr = FieldNotRequired()
- thumbnail: HttpUrlWithCustomMinLength = FieldNotRequired()
+ thumbnail: HttpUrl = FieldNotRequired()
workbench: NodesDict = FieldNotRequired()
access_rights: dict[GroupIDStr, AccessRights] = FieldNotRequired()
tags: list[int] = FieldNotRequired()
@@ -135,7 +131,7 @@ class ProjectUpdate(InputSchema):
class ProjectPatch(InputSchema):
name: ShortTruncatedStr = FieldNotRequired()
description: LongTruncatedStr = FieldNotRequired()
- thumbnail: HttpUrlWithCustomMinLength = FieldNotRequired()
+ thumbnail: HttpUrl = FieldNotRequired()
access_rights: dict[GroupIDStr, AccessRights] = FieldNotRequired()
classifiers: list[ClassifierID] = FieldNotRequired()
dev: dict | None = FieldNotRequired()
diff --git a/packages/models-library/src/models_library/api_schemas_webserver/projects_nodes.py b/packages/models-library/src/models_library/api_schemas_webserver/projects_nodes.py
index 25a6f5fb0dd8..cda166e0d138 100644
--- a/packages/models-library/src/models_library/api_schemas_webserver/projects_nodes.py
+++ b/packages/models-library/src/models_library/api_schemas_webserver/projects_nodes.py
@@ -1,7 +1,7 @@
# mypy: disable-error-code=truthy-function
-from typing import Any, ClassVar, Literal, TypeAlias
+from typing import Any, Literal, TypeAlias
-from pydantic import Field
+from pydantic import ConfigDict, Field
from ..api_schemas_directorv2.dynamic_services import RetrieveDataOut
from ..basic_types import PortInt
@@ -62,13 +62,13 @@ class NodeGet(OutputSchema):
service_key: ServiceKey = Field(
...,
description="distinctive name for the node based on the docker registry path",
- example=[
+ examples=[
"simcore/services/comp/itis/sleeper",
"simcore/services/dynamic/3dviewer",
],
)
service_version: ServiceVersion = Field(
- ..., description="semantic version number", example=["1.0.0", "0.0.1"]
+ ..., description="semantic version number", examples=["1.0.0", "0.0.1"]
)
service_host: str = Field(
...,
@@ -90,9 +90,8 @@ class NodeGet(OutputSchema):
description="the service message",
)
user_id: str = Field(..., description="the user that started the service")
-
- class Config:
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"example": {
"published_port": 30000,
"entrypoint": "/the/entry/point/is/here",
@@ -107,6 +106,7 @@ class Config:
"user_id": 123,
}
}
+ )
class NodeGetIdle(OutputSchema):
@@ -117,30 +117,32 @@ class NodeGetIdle(OutputSchema):
def from_node_id(cls, node_id: NodeID) -> "NodeGetIdle":
return cls(service_state="idle", service_uuid=node_id)
- class Config:
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"example": {
"service_uuid": "3fa85f64-5717-4562-b3fc-2c963f66afa6",
"service_state": "idle",
}
}
+ )
class NodeGetUnknown(OutputSchema):
service_state: Literal["unknown"]
service_uuid: NodeID
- @classmethod
- def from_node_id(cls, node_id: NodeID) -> "NodeGetUnknown":
- return cls(service_state="unknown", service_uuid=node_id)
-
- class Config:
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"example": {
"service_uuid": "3fa85f64-5717-4562-b3fc-2c963f66afa6",
"service_state": "unknown",
}
}
+ )
+
+ @classmethod
+ def from_node_id(cls, node_id: NodeID) -> "NodeGetUnknown":
+ return cls(service_state="unknown", service_uuid=node_id)
class NodeOutputs(InputSchemaWithoutCamelCase):
@@ -152,5 +154,4 @@ class NodeRetrieve(InputSchemaWithoutCamelCase):
class NodeRetrieved(RetrieveDataOut):
- class Config(OutputSchema.Config):
- ...
+ model_config = OutputSchema.model_config
diff --git a/packages/models-library/src/models_library/api_schemas_webserver/projects_ports.py b/packages/models-library/src/models_library/api_schemas_webserver/projects_ports.py
index df38c862900c..6582542525b7 100644
--- a/packages/models-library/src/models_library/api_schemas_webserver/projects_ports.py
+++ b/packages/models-library/src/models_library/api_schemas_webserver/projects_ports.py
@@ -15,19 +15,16 @@ class _ProjectIOBase(BaseModel):
class ProjectInputUpdate(_ProjectIOBase):
- class Config(InputSchemaWithoutCamelCase):
- ...
+ model_config = InputSchemaWithoutCamelCase.model_config
class ProjectInputGet(OutputSchema, _ProjectIOBase):
label: str
- class Config(InputSchemaWithoutCamelCase):
- ...
+ model_config = InputSchemaWithoutCamelCase.model_config
class ProjectOutputGet(_ProjectIOBase):
label: str
- class Config(OutputSchema):
- ...
+ model_config = OutputSchema.model_config
diff --git a/packages/models-library/src/models_library/api_schemas_webserver/resource_usage.py b/packages/models-library/src/models_library/api_schemas_webserver/resource_usage.py
index fa150f9ffc6b..8242105f55a9 100644
--- a/packages/models-library/src/models_library/api_schemas_webserver/resource_usage.py
+++ b/packages/models-library/src/models_library/api_schemas_webserver/resource_usage.py
@@ -1,7 +1,7 @@
from datetime import datetime
from decimal import Decimal
-from pydantic import BaseModel
+from pydantic import BaseModel, ConfigDict
from ..projects import ProjectID
from ..projects_nodes_io import NodeID
@@ -95,9 +95,10 @@ class CreatePricingPlanBodyParams(InputSchema):
classification: PricingPlanClassification
pricing_plan_key: str
- class Config:
- anystr_strip_whitespace = True
- max_anystr_length = 200
+ model_config = ConfigDict(
+ str_strip_whitespace=True,
+ str_max_length=200,
+ )
class UpdatePricingPlanBodyParams(InputSchema):
@@ -105,9 +106,10 @@ class UpdatePricingPlanBodyParams(InputSchema):
description: str
is_active: bool
- class Config:
- anystr_strip_whitespace = True
- max_anystr_length = 200
+ model_config = ConfigDict(
+ str_strip_whitespace=True,
+ str_max_length=200,
+ )
class CreatePricingUnitBodyParams(InputSchema):
@@ -118,9 +120,10 @@ class CreatePricingUnitBodyParams(InputSchema):
cost_per_unit: Decimal
comment: str
- class Config:
- anystr_strip_whitespace = True
- max_anystr_length = 200
+ model_config = ConfigDict(
+ str_strip_whitespace=True,
+ str_max_length=200,
+ )
class UpdatePricingUnitBodyParams(InputSchema):
@@ -130,15 +133,17 @@ class UpdatePricingUnitBodyParams(InputSchema):
specific_info: SpecificInfo
pricing_unit_cost_update: PricingUnitCostUpdate | None
- class Config:
- anystr_strip_whitespace = True
- max_anystr_length = 200
+ model_config = ConfigDict(
+ str_strip_whitespace=True,
+ str_max_length=200,
+ )
class ConnectServiceToPricingPlanBodyParams(InputSchema):
service_key: ServiceKey
service_version: ServiceVersion
- class Config:
- anystr_strip_whitespace = True
- max_anystr_length = 200
+ model_config = ConfigDict(
+ str_strip_whitespace=True,
+ str_max_length=200,
+ )
diff --git a/packages/models-library/src/models_library/api_schemas_webserver/wallets.py b/packages/models-library/src/models_library/api_schemas_webserver/wallets.py
index af0aa61ac809..f9ebbd9fb2d2 100644
--- a/packages/models-library/src/models_library/api_schemas_webserver/wallets.py
+++ b/packages/models-library/src/models_library/api_schemas_webserver/wallets.py
@@ -1,8 +1,8 @@
from datetime import datetime
from decimal import Decimal
-from typing import Any, ClassVar, Literal, TypeAlias
+from typing import Literal, TypeAlias
-from pydantic import Field, HttpUrl, validator
+from pydantic import ConfigDict, Field, HttpUrl, field_validator
from ..basic_types import AmountDecimal, IDStr, NonNegativeDecimal
from ..users import GroupID
@@ -91,8 +91,8 @@ class PaymentMethodInitiated(OutputSchema):
..., description="Link to external site that holds the payment submission form"
)
- class Config(OutputSchema.Config):
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"examples": [
{
"wallet_id": 1,
@@ -101,6 +101,7 @@ class Config(OutputSchema.Config):
}
]
}
+ )
class PaymentMethodTransaction(OutputSchema):
@@ -109,8 +110,8 @@ class PaymentMethodTransaction(OutputSchema):
payment_method_id: PaymentMethodID
state: Literal["PENDING", "SUCCESS", "FAILED", "CANCELED"]
- class Config(OutputSchema.Config):
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"examples": [
{
"walletId": 1,
@@ -119,6 +120,7 @@ class Config(OutputSchema.Config):
}
]
}
+ )
class PaymentMethodGet(OutputSchema):
@@ -135,8 +137,8 @@ class PaymentMethodGet(OutputSchema):
description="If true, this payment-method is used for auto-recharge",
)
- class Config(OutputSchema.Config):
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"examples": [
{
"idr": "pm_1234567890",
@@ -157,6 +159,7 @@ class Config(OutputSchema.Config):
},
],
}
+ )
#
@@ -194,7 +197,7 @@ class ReplaceWalletAutoRecharge(InputSchema):
top_up_amount_in_usd: NonNegativeDecimal
monthly_limit_in_usd: NonNegativeDecimal | None
- @validator("monthly_limit_in_usd")
+ @field_validator("monthly_limit_in_usd")
@classmethod
def _monthly_limit_greater_than_top_up(cls, v, values):
top_up = values["top_up_amount_in_usd"]
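Minimal sketch of a cross-field check under pydantic v2 (hypothetical model): the second argument of a `field_validator` is a `ValidationInfo`, so previously validated fields are read from `info.data` rather than a plain `values` dict.

from pydantic import BaseModel, NonNegativeFloat, ValidationInfo, field_validator


class AutoRecharge(BaseModel):
    top_up_amount_in_usd: NonNegativeFloat
    monthly_limit_in_usd: NonNegativeFloat | None = None

    @field_validator("monthly_limit_in_usd")
    @classmethod
    def _limit_greater_than_top_up(cls, v, info: ValidationInfo):
        # fields validate in declaration order, so the top-up amount is already in info.data
        top_up = info.data["top_up_amount_in_usd"]
        if v is not None and v < top_up:
            msg = "Monthly limit must be greater than or equal to the top-up amount"
            raise ValueError(msg)
        return v


assert AutoRecharge(top_up_amount_in_usd=10, monthly_limit_in_usd=100).monthly_limit_in_usd == 100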
diff --git a/packages/models-library/src/models_library/api_schemas_webserver/workspaces.py b/packages/models-library/src/models_library/api_schemas_webserver/workspaces.py
index 0ba98ab4ec31..32f17200ee4c 100644
--- a/packages/models-library/src/models_library/api_schemas_webserver/workspaces.py
+++ b/packages/models-library/src/models_library/api_schemas_webserver/workspaces.py
@@ -4,7 +4,7 @@
from models_library.basic_types import IDStr
from models_library.users import GroupID
from models_library.workspaces import WorkspaceID
-from pydantic import Extra, PositiveInt
+from pydantic import ConfigDict, PositiveInt
from ..access_rights import AccessRights
from ._base import InputSchema, OutputSchema
@@ -31,8 +31,7 @@ class CreateWorkspaceBodyParams(InputSchema):
description: str | None = None
thumbnail: str | None = None
- class Config:
- extra = Extra.forbid
+ model_config = ConfigDict(extra="forbid")
class PutWorkspaceBodyParams(InputSchema):
@@ -40,5 +39,4 @@ class PutWorkspaceBodyParams(InputSchema):
description: str | None = None
thumbnail: str | None = None
- class Config:
- extra = Extra.forbid
+ model_config = ConfigDict(extra="forbid")
diff --git a/packages/models-library/src/models_library/basic_regex.py b/packages/models-library/src/models_library/basic_regex.py
index eb202fa188b8..51441fe39e63 100644
--- a/packages/models-library/src/models_library/basic_regex.py
+++ b/packages/models-library/src/models_library/basic_regex.py
@@ -46,7 +46,7 @@
# Storage basic file ID
SIMCORE_S3_FILE_ID_RE = rf"^(api|({UUID_RE_BASE}))\/({UUID_RE_BASE})\/(.+)$"
-SIMCORE_S3_DIRECTORY_ID_RE = rf"^({UUID_RE_BASE})\/({UUID_RE_BASE})\/(.+)/$"
+SIMCORE_S3_DIRECTORY_ID_RE = rf"^({UUID_RE_BASE})\/({UUID_RE_BASE})\/(.+)\/$"
# S3 - AWS bucket names [https://docs.aws.amazon.com/AmazonS3/latest/userguide/bucketnamingrules.html]
S3_BUCKET_NAME_RE = r"(?!(^xn--|-s3alias$))^[a-z0-9][a-z0-9-]{1,61}[a-z0-9]$"
diff --git a/packages/models-library/src/models_library/basic_types.py b/packages/models-library/src/models_library/basic_types.py
index 18788e188a86..d61eff226176 100644
--- a/packages/models-library/src/models_library/basic_types.py
+++ b/packages/models-library/src/models_library/basic_types.py
@@ -1,15 +1,11 @@
-import re
+from decimal import Decimal
from enum import StrEnum
-from typing import Final, TypeAlias
+from re import Pattern
+from typing import Annotated, Final, TypeAlias
-
-from pydantic import (
- ConstrainedDecimal,
- ConstrainedInt,
- ConstrainedStr,
- HttpUrl,
- PositiveInt,
-)
+import pydantic
+from pydantic import Field, PositiveInt, StringConstraints
+from pydantic_core import core_schema
from .basic_regex import (
PROPERTY_KEY_RE,
@@ -18,68 +14,44 @@
UUID_RE,
)
+NonNegativeDecimal: TypeAlias = Annotated[Decimal, Field(ge=0)]
-class NonNegativeDecimal(ConstrainedDecimal):
- ge = 0
-
-
-class PositiveDecimal(ConstrainedDecimal):
- gt = 0
-
-
-class AmountDecimal(ConstrainedDecimal):
- # Used for amounts like credits or dollars
- # NOTE: upper limit to avoid https://github.com/ITISFoundation/appmotion-exchange/issues/2
- # NOTE: do not contraint in decimal places. Too strong validation error rather Decimal.quantize
- # before passing the value
- gt = 0
- lt = 1e6
+PositiveDecimal: TypeAlias = Annotated[Decimal, Field(gt=0)]
+# Used for amounts like credits or dollars
+# NOTE: upper limit to avoid https://github.com/ITISFoundation/appmotion-exchange/issues/2
+# NOTE: do not constrain the number of decimal places: that validation is too strict; rather apply Decimal.quantize
+# before passing the value

+AmountDecimal: TypeAlias = Annotated[Decimal, Field(gt=0, lt=1e6)]
# port number range
-class PortInt(ConstrainedInt):
- gt = 0
- lt = 65535
-
+PortInt: TypeAlias = Annotated[int, Field(gt=0, lt=65535)]
# e.g. 'v5'
-class VersionTag(ConstrainedStr):
- regex = re.compile(r"^v\d$")
-
-
-class VersionStr(ConstrainedStr):
- regex = re.compile(SIMPLE_VERSION_RE)
+VersionTag: TypeAlias = Annotated[str, StringConstraints(pattern=r"^v\d$")]
+VersionStr: TypeAlias = Annotated[str, StringConstraints(pattern=SIMPLE_VERSION_RE)]
# e.g. '1.23.11' or '2.1.0-rc2' or not 0.1.0-alpha (see test_SEMANTIC_VERSION_RE_W_CAPTURE_GROUPS)
-class SemanticVersionStr(ConstrainedStr):
- regex = re.compile(SEMANTIC_VERSION_RE_W_CAPTURE_GROUPS)
-
+SemanticVersionStr: TypeAlias = Annotated[
+ str, StringConstraints(pattern=SEMANTIC_VERSION_RE_W_CAPTURE_GROUPS)
+]
# checksums
# sha1sum path/to/file
-class SHA1Str(ConstrainedStr):
- regex = re.compile(r"^[a-fA-F0-9]{40}$")
-
+SHA1Str: TypeAlias = Annotated[str, StringConstraints(pattern=r"^[a-fA-F0-9]{40}$")]
# sha256sum path/to/file
-class SHA256Str(ConstrainedStr):
- regex = re.compile(r"^[a-fA-F0-9]{64}$")
-
+SHA256Str: TypeAlias = Annotated[str, StringConstraints(pattern=r"^[a-fA-F0-9]{64}$")]
# md5sum path/to/file
-class MD5Str(ConstrainedStr):
- regex = re.compile(r"^[a-fA-F0-9]{32}$")
-
+MD5Str: TypeAlias = Annotated[str, StringConstraints(pattern=r"^[a-fA-F0-9]{32}$")]
# env var
-class EnvVarKey(ConstrainedStr):
- regex = re.compile(r"[a-zA-Z]\w*")
-
+EnvVarKey: TypeAlias = Annotated[str, StringConstraints(pattern=r"^[a-zA-Z]\w*")]
# e.g. '5c833a78-1af3-43a7-9ed7-6a63b188f4d8'
-class UUIDStr(ConstrainedStr):
- regex = re.compile(UUID_RE)
+UUIDStr: TypeAlias = Annotated[str, StringConstraints(pattern=UUID_RE)]
# non-empty bounded string used as identifier
@@ -87,6 +59,32 @@ class UUIDStr(ConstrainedStr):
_ELLIPSIS_CHAR: Final[str] = "..."
+class ConstrainedStr(str):
+ pattern: str | Pattern[str] | None = None
+ min_length: int | None = None
+ max_length: int | None = None
+ strip_whitespace: bool = False
+ curtail_length: int | None = None
+
+ @classmethod
+ def _validate(cls, __input_value: str) -> str:
+ if cls.curtail_length and len(__input_value) > cls.curtail_length:
+ __input_value = __input_value[: cls.curtail_length]
+ return cls(__input_value)
+
+ @classmethod
+ def __get_pydantic_core_schema__(cls, _source_type, _handler):
+ return core_schema.no_info_after_validator_function(
+ cls._validate,
+ core_schema.str_schema(
+ pattern=cls.pattern,
+ min_length=cls.min_length,
+ max_length=cls.max_length,
+ strip_whitespace=cls.strip_whitespace,
+ ),
+ )
+
+
class IDStr(ConstrainedStr):
strip_whitespace = True
min_length = 1
@@ -132,6 +130,9 @@ class LongTruncatedStr(ConstrainedStr):
IdInt: TypeAlias = PositiveInt
PrimaryKeyInt: TypeAlias = PositiveInt
+AnyHttpUrl = Annotated[str, pydantic.AnyHttpUrl]
+
+HttpUrl = Annotated[str, pydantic.HttpUrl]
# https e.g. https://techterms.com/definition/https
class HttpSecureUrl(HttpUrl):
@@ -180,5 +181,4 @@ class BuildTargetEnum(StrEnum):
DEVELOPMENT = "development"
-class KeyIDStr(ConstrainedStr):
- regex = re.compile(PROPERTY_KEY_RE)
+KeyIDStr = Annotated[str, StringConstraints(pattern=PROPERTY_KEY_RE)]
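A quick sketch of how the `Annotated[str, StringConstraints(...)]` aliases defined above behave when validated through a `TypeAdapter` (values are illustrative):

from typing import Annotated
from pydantic import StringConstraints, TypeAdapter, ValidationError

SHA1Str = Annotated[str, StringConstraints(pattern=r"^[a-fA-F0-9]{40}$")]

assert TypeAdapter(SHA1Str).validate_python("a" * 40) == "a" * 40
try:
    TypeAdapter(SHA1Str).validate_python("not-a-sha1")
except ValidationError:
    pass  # rejected by the pattern, as expected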
diff --git a/packages/models-library/src/models_library/boot_options.py b/packages/models-library/src/models_library/boot_options.py
index ec1aabd546b7..52756bf10971 100644
--- a/packages/models-library/src/models_library/boot_options.py
+++ b/packages/models-library/src/models_library/boot_options.py
@@ -1,6 +1,4 @@
-from typing import Any, ClassVar
-
-from pydantic import BaseModel, validator
+from pydantic import BaseModel, ConfigDict, ValidationInfo, field_validator
from typing_extensions import TypedDict
from .basic_types import EnvVarKey
@@ -17,17 +15,17 @@ class BootOption(BaseModel):
default: str
items: dict[str, BootChoice]
- @validator("items")
+ @field_validator("items")
@classmethod
- def ensure_default_included(cls, v, values):
- default = values["default"]
+ def ensure_default_included(cls, v, info: ValidationInfo):
+ default = info.data["default"]
if default not in v:
msg = f"Expected default={default} to be present a key of items={v}"
raise ValueError(msg)
return v
- class Config:
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"examples": [
{
"label": "Boot mode",
@@ -61,6 +59,7 @@ class Config:
},
]
}
+ )
BootOptions = dict[EnvVarKey, BootOption]
diff --git a/packages/models-library/src/models_library/callbacks_mapping.py b/packages/models-library/src/models_library/callbacks_mapping.py
index 9e4e88214cef..498766ed7502 100644
--- a/packages/models-library/src/models_library/callbacks_mapping.py
+++ b/packages/models-library/src/models_library/callbacks_mapping.py
@@ -1,7 +1,7 @@
from collections.abc import Sequence
-from typing import Any, ClassVar, Final
+from typing import Final
-from pydantic import BaseModel, Extra, Field, NonNegativeFloat, validator
+from pydantic import BaseModel, ConfigDict, Field, NonNegativeFloat, field_validator
INACTIVITY_TIMEOUT_CAP: Final[NonNegativeFloat] = 5
TIMEOUT_MIN: Final[NonNegativeFloat] = 1
@@ -15,15 +15,15 @@ class UserServiceCommand(BaseModel):
timeout: NonNegativeFloat = Field(
..., description="after this interval the command will be timed-out"
)
-
- class Config:
- extra = Extra.forbid
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ extra="forbid",
+ json_schema_extra={
"examples": [
{"service": "rt-web", "command": "ls", "timeout": 1},
{"service": "s4l-core", "command": ["ls", "-lah"], "timeout": 1},
]
- }
+ },
+ )
class CallbacksMapping(BaseModel):
@@ -47,24 +47,9 @@ class CallbacksMapping(BaseModel):
),
)
- @validator("inactivity")
- @classmethod
- def ensure_inactivity_timeout_is_capped(
- cls, v: UserServiceCommand
- ) -> UserServiceCommand:
- if v is not None and (
- v.timeout < TIMEOUT_MIN or v.timeout > INACTIVITY_TIMEOUT_CAP
- ):
- msg = (
- f"Constraint not respected for inactivity timeout={v.timeout}: "
- f"interval=({TIMEOUT_MIN}, {INACTIVITY_TIMEOUT_CAP})"
- )
- raise ValueError(msg)
- return v
-
- class Config:
- extra = Extra.forbid
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ extra="forbid",
+ json_schema_extra={
"examples": [
{
# empty validates
@@ -73,21 +58,37 @@ class Config:
"metrics": None,
"before_shutdown": [],
},
- {"metrics": UserServiceCommand.Config.schema_extra["examples"][0]},
+ {"metrics": UserServiceCommand.model_config["json_schema_extra"]["examples"][0]}, # type: ignore [index]
{
- "metrics": UserServiceCommand.Config.schema_extra["examples"][0],
+ "metrics": UserServiceCommand.model_config["json_schema_extra"]["examples"][0], # type: ignore [index]
"before_shutdown": [
- UserServiceCommand.Config.schema_extra["examples"][0],
- UserServiceCommand.Config.schema_extra["examples"][1],
+ UserServiceCommand.model_config["json_schema_extra"]["examples"][0], # type: ignore [index]
+ UserServiceCommand.model_config["json_schema_extra"]["examples"][1], # type: ignore [index]
],
},
{
- "metrics": UserServiceCommand.Config.schema_extra["examples"][0],
+ "metrics": UserServiceCommand.model_config["json_schema_extra"]["examples"][0], # type: ignore [index]
"before_shutdown": [
- UserServiceCommand.Config.schema_extra["examples"][0],
- UserServiceCommand.Config.schema_extra["examples"][1],
+ UserServiceCommand.model_config["json_schema_extra"]["examples"][0], # type: ignore [index]
+ UserServiceCommand.model_config["json_schema_extra"]["examples"][1], # type: ignore [index]
],
- "inactivity": UserServiceCommand.Config.schema_extra["examples"][0],
+ "inactivity": UserServiceCommand.model_config["json_schema_extra"]["examples"][0], # type: ignore [index]
},
]
- }
+ },
+ )
+
+ @field_validator("inactivity")
+ @classmethod
+ def ensure_inactivity_timeout_is_capped(
+ cls, v: UserServiceCommand
+ ) -> UserServiceCommand:
+ if v is not None and (
+ v.timeout < TIMEOUT_MIN or v.timeout > INACTIVITY_TIMEOUT_CAP
+ ):
+ msg = (
+ f"Constraint not respected for inactivity timeout={v.timeout}: "
+ f"interval=({TIMEOUT_MIN}, {INACTIVITY_TIMEOUT_CAP})"
+ )
+ raise ValueError(msg)
+ return v
diff --git a/packages/models-library/src/models_library/clusters.py b/packages/models-library/src/models_library/clusters.py
index c51598b06ee1..1dbcff0bc702 100644
--- a/packages/models-library/src/models_library/clusters.py
+++ b/packages/models-library/src/models_library/clusters.py
@@ -1,16 +1,16 @@
from enum import auto
from pathlib import Path
-from typing import Any, ClassVar, Final, Literal, TypeAlias
+from typing import Final, Literal, TypeAlias
from pydantic import (
AnyUrl,
BaseModel,
- Extra,
+ ConfigDict,
Field,
HttpUrl,
SecretStr,
- root_validator,
- validator,
+ field_validator,
+ model_validator,
)
from pydantic.types import NonNegativeInt
@@ -32,8 +32,7 @@ class ClusterAccessRights(BaseModel):
write: bool = Field(..., description="allows to modify the cluster")
delete: bool = Field(..., description="allows to delete a cluster")
- class Config:
- extra = Extra.forbid
+ model_config = ConfigDict(extra="forbid")
CLUSTER_ADMIN_RIGHTS = ClusterAccessRights(read=True, write=True, delete=True)
@@ -45,9 +44,7 @@ class Config:
class BaseAuthentication(BaseModel):
type: str
- class Config:
- frozen = True
- extra = Extra.forbid
+ model_config = ConfigDict(frozen=True, extra="forbid")
class SimpleAuthentication(BaseAuthentication):
@@ -55,8 +52,8 @@ class SimpleAuthentication(BaseAuthentication):
username: str
password: SecretStr
- class Config(BaseAuthentication.Config):
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"examples": [
{
"type": "simple",
@@ -65,32 +62,34 @@ class Config(BaseAuthentication.Config):
},
]
}
+ )
class KerberosAuthentication(BaseAuthentication):
type: Literal["kerberos"] = "kerberos"
- # NOTE: the entries here still need to be defined
- class Config(BaseAuthentication.Config):
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"examples": [
{
"type": "kerberos",
},
]
}
+ )
class JupyterHubTokenAuthentication(BaseAuthentication):
type: Literal["jupyterhub"] = "jupyterhub"
api_token: str
- class Config(BaseAuthentication.Config):
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"examples": [
{"type": "jupyterhub", "api_token": "some_jupyterhub_token"},
]
}
+ )
class NoAuthentication(BaseAuthentication):
@@ -103,8 +102,8 @@ class TLSAuthentication(BaseAuthentication):
tls_client_cert: Path
tls_client_key: Path
- class Config(BaseAuthentication.Config):
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"examples": [
{
"type": "tls",
@@ -114,6 +113,7 @@ class Config(BaseAuthentication.Config):
},
]
}
+ )
InternalClusterAuthentication: TypeAlias = NoAuthentication | TLSAuthentication
@@ -134,6 +134,7 @@ class BaseCluster(BaseModel):
default=None,
description="url to the image describing this cluster",
examples=["https://placeimg.com/171/96/tech/grayscale/?0.jpg"],
+ validate_default=True
)
endpoint: AnyUrl
authentication: ClusterAuthentication = Field(
@@ -141,13 +142,11 @@ class BaseCluster(BaseModel):
)
access_rights: dict[GroupID, ClusterAccessRights] = Field(default_factory=dict)
- _from_equivalent_enums = validator("type", allow_reuse=True, pre=True)(
+ _from_equivalent_enums = field_validator("type", mode="before")(
create_enums_pre_validator(ClusterTypeInModel)
)
- class Config:
- extra = Extra.forbid
- use_enum_values = True
+ model_config = ConfigDict(extra="forbid", use_enum_values=True)
ClusterID: TypeAlias = NonNegativeInt
@@ -157,8 +156,8 @@ class Config:
class Cluster(BaseCluster):
id: ClusterID = Field(..., description="The cluster ID")
- class Config(BaseCluster.Config):
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"examples": [
{
"id": DEFAULT_CLUSTER_ID,
@@ -193,9 +192,9 @@ class Config(BaseCluster.Config):
"endpoint": "https://registry.osparc-development.fake.dev",
"authentication": {"type": "kerberos"},
"access_rights": {
- 154: CLUSTER_ADMIN_RIGHTS,
- 12: CLUSTER_MANAGER_RIGHTS,
- 7899: CLUSTER_USER_RIGHTS,
+ 154: CLUSTER_ADMIN_RIGHTS, # type: ignore[dict-item]
+ 12: CLUSTER_MANAGER_RIGHTS, # type: ignore[dict-item]
+ 7899: CLUSTER_USER_RIGHTS, # type: ignore[dict-item]
},
},
{
@@ -210,15 +209,16 @@ class Config(BaseCluster.Config):
"api_token": "some_fake_token",
},
"access_rights": {
- 154: CLUSTER_ADMIN_RIGHTS,
- 12: CLUSTER_MANAGER_RIGHTS,
- 7899: CLUSTER_USER_RIGHTS,
+ 154: CLUSTER_ADMIN_RIGHTS, # type: ignore[dict-item]
+ 12: CLUSTER_MANAGER_RIGHTS, # type: ignore[dict-item]
+ 7899: CLUSTER_USER_RIGHTS, # type: ignore[dict-item]
},
},
]
}
+ )
- @root_validator(pre=True)
+ @model_validator(mode="before")
@classmethod
def check_owner_has_access_rights(cls, values):
is_default_cluster = bool(values["id"] == DEFAULT_CLUSTER_ID)
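Sketch of the `model_validator(mode="before")` pattern used above, on a hypothetical model: like v1 `root_validator(pre=True)`, it receives the raw input before field validation.

from pydantic import BaseModel, model_validator


class ClusterLike(BaseModel):  # hypothetical stand-in, not the real Cluster model
    id: int
    owner: int
    access_rights: dict[int, str] = {}

    @model_validator(mode="before")
    @classmethod
    def _grant_owner_access(cls, values: dict):
        # mode="before" sees the raw input dict, so defaults can still be injected
        values.setdefault("access_rights", {}).setdefault(values["owner"], "admin")
        return values


assert ClusterLike(id=1, owner=7).access_rights == {7: "admin"}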
diff --git a/packages/models-library/src/models_library/docker.py b/packages/models-library/src/models_library/docker.py
index 732dfc08197a..75c003fff89a 100644
--- a/packages/models-library/src/models_library/docker.py
+++ b/packages/models-library/src/models_library/docker.py
@@ -1,18 +1,20 @@
import contextlib
import re
-from typing import Any, ClassVar, Final
+from typing import Annotated, Any, Final, TypeAlias
from pydantic import (
BaseModel,
ByteSize,
- ConstrainedStr,
+ ConfigDict,
Field,
+ StringConstraints,
+ TypeAdapter,
ValidationError,
- parse_obj_as,
- root_validator,
+ model_validator,
)
from .basic_regex import DOCKER_GENERIC_TAG_KEY_RE, DOCKER_LABEL_KEY_REGEX
+from .basic_types import ConstrainedStr
from .generated_models.docker_rest_api import Task
from .products import ProductName
from .projects import ProjectID
@@ -23,16 +25,15 @@
class DockerLabelKey(ConstrainedStr):
# NOTE: https://docs.docker.com/config/labels-custom-metadata/#key-format-recommendations
# good practice: use reverse DNS notation
- regex: re.Pattern[str] | None = DOCKER_LABEL_KEY_REGEX
+ pattern = DOCKER_LABEL_KEY_REGEX
@classmethod
def from_key(cls, key: str) -> "DockerLabelKey":
return cls(key.lower().replace("_", "-"))
-class DockerGenericTag(ConstrainedStr):
- # NOTE: https://docs.docker.com/engine/reference/commandline/tag/#description
- regex: re.Pattern[str] | None = DOCKER_GENERIC_TAG_KEY_RE
+# NOTE: https://docs.docker.com/engine/reference/commandline/tag/#description
+DockerGenericTag: TypeAlias = Annotated[str, StringConstraints(pattern=DOCKER_GENERIC_TAG_KEY_RE)]
class DockerPlacementConstraint(ConstrainedStr):
@@ -60,7 +61,7 @@ class DockerPlacementConstraint(ConstrainedStr):
DOCKER_TASK_EC2_INSTANCE_TYPE_PLACEMENT_CONSTRAINT_KEY: Final[
DockerLabelKey
-] = parse_obj_as(DockerLabelKey, "ec2-instance-type")
+] = TypeAdapter(DockerLabelKey).validate_python("ec2-instance-type")
def to_simcore_runtime_docker_label_key(key: str) -> DockerLabelKey:
@@ -99,7 +100,7 @@ class StandardSimcoreDockerLabels(BaseModel):
..., alias=f"{_SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX}cpu-limit"
)
- @root_validator(pre=True)
+ @model_validator(mode="before")
@classmethod
def _backwards_compatibility(cls, values: dict[str, Any]) -> dict[str, Any]:
# NOTE: this is necessary for dy-sidecar and legacy service until they are adjusted
@@ -122,7 +123,7 @@ def _backwards_compatibility(cls, values: dict[str, Any]) -> dict[str, Any]:
def _convert_nano_cpus_to_cpus(nano_cpu: str) -> str:
with contextlib.suppress(ValidationError):
- return f"{parse_obj_as(float, nano_cpu) / (1.0*10**9):.2f}"
+ return f"{TypeAdapter(float).validate_python(nano_cpu) / (1.0*10**9):.2f}"
return _UNDEFINED_LABEL_VALUE_INT
mapped_values.setdefault(
@@ -144,13 +145,13 @@ def to_simcore_runtime_docker_labels(self) -> dict[DockerLabelKey, str]:
@classmethod
def from_docker_task(cls, docker_task: Task) -> "StandardSimcoreDockerLabels":
assert docker_task.Spec # nosec
- assert docker_task.Spec.ContainerSpec # nosec
- task_labels = docker_task.Spec.ContainerSpec.Labels or {}
- return cls.parse_obj(task_labels)
+ assert docker_task.Spec.ContainerSpec_ # nosec
+ task_labels = docker_task.Spec.ContainerSpec_.Labels or {}
+ return cls.model_validate(task_labels)
- class Config:
- allow_population_by_field_name = True
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ populate_by_name=True,
+ json_schema_extra={
"examples": [
# legacy service labels
{
@@ -219,4 +220,5 @@ class Config:
"io.simcore.runtime.user-id": "5",
},
]
- }
+ },
+ )
diff --git a/packages/models-library/src/models_library/emails.py b/packages/models-library/src/models_library/emails.py
index 80996eed76fa..72835f4c754a 100644
--- a/packages/models-library/src/models_library/emails.py
+++ b/packages/models-library/src/models_library/emails.py
@@ -1,7 +1,5 @@
-from pydantic import EmailStr
+from typing import Annotated, TypeAlias
+from pydantic import AfterValidator, EmailStr
-class LowerCaseEmailStr(EmailStr):
- @classmethod
- def validate(cls, value: str) -> str:
- return super().validate(value).lower()
+LowerCaseEmailStr: TypeAlias = Annotated[str, EmailStr, AfterValidator(str.lower)]
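Usage sketch of the `Annotated` email alias above (requires the `email-validator` extra already pulled in via `pydantic[email]`): `EmailStr` validates the address and `AfterValidator(str.lower)` lower-cases the result.

from typing import Annotated
from pydantic import AfterValidator, EmailStr, TypeAdapter

LowerCaseEmailStr = Annotated[str, EmailStr, AfterValidator(str.lower)]

assert TypeAdapter(LowerCaseEmailStr).validate_python("Jane.Doe@Example.COM") == "jane.doe@example.com"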
diff --git a/packages/models-library/src/models_library/errors_classes.py b/packages/models-library/src/models_library/errors_classes.py
index dab24fb40099..6f6d9e0c2661 100644
--- a/packages/models-library/src/models_library/errors_classes.py
+++ b/packages/models-library/src/models_library/errors_classes.py
@@ -7,12 +7,21 @@ def __missing__(self, key):
class OsparcErrorMixin(PydanticErrorMixin):
- def __new__(cls, *args, **kwargs):
+ msg_template: str
+
+ def __new__(cls, *_args, **_kwargs):
if not hasattr(cls, "code"):
- cls.code = cls._get_full_class_name()
- return super().__new__(cls, *args, **kwargs)
+ cls.code = cls._get_full_class_name() # type: ignore[assignment]
+ return super().__new__(cls)
+
+ def __init__(self, *_args, **kwargs) -> None:
+ self.__dict__ = kwargs
+ super().__init__(message=self._build_message(), code=self.code)
def __str__(self) -> str:
+ return self._build_message()
+
+ def _build_message(self) -> str:
# NOTE: safe. Does not raise KeyError
return self.msg_template.format_map(_DefaultDict(**self.__dict__))
diff --git a/packages/models-library/src/models_library/folders.py b/packages/models-library/src/models_library/folders.py
index 73262e1e647c..829e3865c514 100644
--- a/packages/models-library/src/models_library/folders.py
+++ b/packages/models-library/src/models_library/folders.py
@@ -3,7 +3,7 @@
from models_library.users import GroupID, UserID
from models_library.workspaces import WorkspaceID
-from pydantic import BaseModel, Field, PositiveInt
+from pydantic import BaseModel, ConfigDict, Field, PositiveInt
FolderID: TypeAlias = PositiveInt
@@ -32,5 +32,4 @@ class FolderDB(BaseModel):
user_id: UserID | None
workspace_id: WorkspaceID | None
- class Config:
- orm_mode = True
+ model_config = ConfigDict(from_attributes=True)
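Sketch of the `from_attributes=True` replacement for v1 `orm_mode = True`, using a hypothetical row object: attribute-based inputs are consumed via `model_validate`.

from dataclasses import dataclass
from pydantic import BaseModel, ConfigDict


@dataclass
class FolderRow:  # stand-in for a DB row object
    folder_id: int
    name: str


class FolderDBLike(BaseModel):  # illustrative, not the real FolderDB
    model_config = ConfigDict(from_attributes=True)

    folder_id: int
    name: str


row = FolderRow(folder_id=1, name="My folder")
assert FolderDBLike.model_validate(row).folder_id == 1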
diff --git a/packages/models-library/src/models_library/function_services_catalog/_settings.py b/packages/models-library/src/models_library/function_services_catalog/_settings.py
index 3ca4260d8ceb..05812b81879a 100644
--- a/packages/models-library/src/models_library/function_services_catalog/_settings.py
+++ b/packages/models-library/src/models_library/function_services_catalog/_settings.py
@@ -1,7 +1,7 @@
import json
import os
-from pydantic import BaseSettings
+from pydantic_settings import BaseSettings
# Expects env var: FUNCTION_SERVICES_AUTHORS='{"OM":{"name": ...}, "EN":{...} }'
try:
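Minimal sketch of the import move above: under pydantic v2, `BaseSettings` lives in the separate `pydantic-settings` package; env-var binding still follows the field name by default. The settings model and variable below are hypothetical.

import os
from pydantic_settings import BaseSettings


class CatalogSettings(BaseSettings):  # hypothetical settings model
    function_services_default_author: str = "UN"


os.environ["FUNCTION_SERVICES_DEFAULT_AUTHOR"] = "OM"
assert CatalogSettings().function_services_default_author == "OM"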
diff --git a/packages/models-library/src/models_library/generated_models/docker_rest_api.py b/packages/models-library/src/models_library/generated_models/docker_rest_api.py
index 835141ea037e..0170672cb266 100644
--- a/packages/models-library/src/models_library/generated_models/docker_rest_api.py
+++ b/packages/models-library/src/models_library/generated_models/docker_rest_api.py
@@ -8,11 +8,11 @@
from enum import Enum
from typing import Any
-from pydantic import BaseModel, Extra, Field
+from pydantic import BaseModel, ConfigDict, Field, RootModel
-class Model(BaseModel):
- __root__: Any
+class Model(RootModel[Any]):
+ ...
class Type(str, Enum):
@@ -61,42 +61,42 @@ class MountPoint(BaseModel):
Type: Type1 | None = Field(
None,
description="The mount type:\n\n- `bind` a mount of a file or directory from the host into the container.\n- `volume` a docker volume with the given `Name`.\n- `tmpfs` a `tmpfs`.\n- `npipe` a named pipe from the host into the container.\n",
- example="volume",
+ examples=["volume"],
)
Name: str | None = Field(
None,
description="Name is the name reference to the underlying data defined by `Source`\ne.g., the volume name.\n",
- example="myvolume",
+ examples=["myvolume"],
)
Source: str | None = Field(
None,
description="Source location of the mount.\n\nFor volumes, this contains the storage location of the volume (within\n`/var/lib/docker/volumes/`). For bind-mounts, and `npipe`, this contains\nthe source (host) part of the bind-mount. For `tmpfs` mount points, this\nfield is empty.\n",
- example="/var/lib/docker/volumes/myvolume/_data",
+ examples=["/var/lib/docker/volumes/myvolume/_data"],
)
Destination: str | None = Field(
None,
description="Destination is the path relative to the container root (`/`) where\nthe `Source` is mounted inside the container.\n",
- example="/usr/share/nginx/html/",
+ examples=["/usr/share/nginx/html/"],
)
Driver: str | None = Field(
None,
description="Driver is the volume driver used to create the volume (if it is a volume).\n",
- example="local",
+ examples=["local"],
)
Mode: str | None = Field(
None,
description='Mode is a comma separated list of options supplied by the user when\ncreating the bind/volume mount.\n\nThe default is platform-specific (`"z"` on Linux, empty on Windows).\n',
- example="z",
+ examples=["z"],
)
RW: bool | None = Field(
None,
description="Whether the mount is mounted writable (read-write).\n",
- example=True,
+ examples=[True],
)
Propagation: str | None = Field(
None,
description="Propagation describes how mounts are propagated from the host into the\nmount point, and vice-versa. Refer to the [Linux kernel documentation](https://www.kernel.org/doc/Documentation/filesystems/sharedsubtree.txt)\nfor details. This field is not used on Windows.\n",
- example="",
+ examples=[""],
)
@@ -115,15 +115,15 @@ class DeviceRequest(BaseModel):
A request for devices to be sent to device drivers
"""
- Driver: str | None = Field(None, example="nvidia")
- Count: int | None = Field(None, example=-1)
+ Driver: str | None = Field(None, examples=["nvidia"])
+ Count: int | None = Field(None, examples=[-1])
DeviceIDs: list[str] | None = Field(
- None, example=["0", "1", "GPU-fef8089b-4820-abfc-e83e-94318197576e"]
+ None, examples=[["0", "1", "GPU-fef8089b-4820-abfc-e83e-94318197576e"]]
)
Capabilities: list[list[str]] | None = Field(
None,
description="A list of capabilities; an OR list of AND lists of capabilities.\n",
- example=[["gpu", "nvidia", "compute"]],
+ examples=[[["gpu", "nvidia", "compute"]]],
)
Options: dict[str, str] | None = Field(
None,
@@ -154,8 +154,9 @@ class BindOptions(BaseModel):
Optional configuration for the `bind` type.
"""
- Propagation: Propagation | None = Field(
+ Propagation_: Propagation | None = Field(
None,
+ alias="Propagation",
description="A propagation mode with the value `[r]private`, `[r]shared`, or `[r]slave`.",
)
NonRecursive: bool | None = Field(
@@ -187,8 +188,8 @@ class VolumeOptions(BaseModel):
Labels: dict[str, str] | None = Field(
None, description="User-defined key/value metadata."
)
- DriverConfig: DriverConfig | None = Field(
- None, description="Map of driver specific options"
+ DriverConfig_: DriverConfig | None = Field(
+ None, alias="DriverConfig", description="Map of driver specific options"
)
@@ -221,14 +222,20 @@ class Mount(BaseModel):
None,
description="The consistency requirement for the mount: `default`, `consistent`, `cached`, or `delegated`.",
)
- BindOptions: BindOptions | None = Field(
- None, description="Optional configuration for the `bind` type."
+ BindOptions_: BindOptions | None = Field(
+ None,
+ alias="BindOptions",
+ description="Optional configuration for the `bind` type.",
)
- VolumeOptions: VolumeOptions | None = Field(
- None, description="Optional configuration for the `volume` type."
+ VolumeOptions_: VolumeOptions | None = Field(
+ None,
+ alias="VolumeOptions",
+ description="Optional configuration for the `volume` type.",
)
- TmpfsOptions: TmpfsOptions | None = Field(
- None, description="Optional configuration for the `tmpfs` type."
+ TmpfsOptions_: TmpfsOptions | None = Field(
+ None,
+ alias="TmpfsOptions",
+ description="Optional configuration for the `tmpfs` type.",
)
@@ -259,8 +266,9 @@ class RestartPolicy(BaseModel):
"""
- Name: Name | None = Field(
+ Name_: Name | None = Field(
None,
+ alias="Name",
description="- Empty string means not to restart\n- `no` Do not automatically restart\n- `always` Always restart\n- `unless-stopped` Restart always except when the user has manually stopped the container\n- `on-failure` Restart only when the container exit code is non-zero\n",
)
MaximumRetryCount: int | None = Field(
@@ -335,7 +343,7 @@ class Resources(BaseModel):
CpusetCpus: str | None = Field(
None,
description="CPUs in which to allow execution (e.g., `0-3`, `0,1`).\n",
- example="0-3",
+ examples=["0-3"],
)
CpusetMems: str | None = Field(
None,
@@ -354,7 +362,7 @@ class Resources(BaseModel):
KernelMemory: int | None = Field(
None,
description="Kernel memory limit in bytes.\n\n> **Deprecated**: This field is deprecated as the kernel 5.4 deprecated\n> `kmem.limit_in_bytes`.\n",
- example=209715200,
+ examples=[209715200],
)
KernelMemoryTCP: int | None = Field(
None, description="Hard limit for kernel TCP buffer memory (in bytes)."
@@ -413,12 +421,12 @@ class Limit(BaseModel):
"""
- NanoCPUs: int | None = Field(None, example=4000000000)
- MemoryBytes: int | None = Field(None, example=8272408576)
+ NanoCPUs: int | None = Field(None, examples=[4000000000])
+ MemoryBytes: int | None = Field(None, examples=[8272408576])
Pids: int | None = Field(
0,
description="Limits the maximum number of PIDs in the container. Set `0` for unlimited.\n",
- example=100,
+ examples=[100],
)
@@ -433,24 +441,30 @@ class DiscreteResourceSpec(BaseModel):
class GenericResource(BaseModel):
- NamedResourceSpec: NamedResourceSpec | None = None
- DiscreteResourceSpec: DiscreteResourceSpec | None = None
+ NamedResourceSpec_: NamedResourceSpec | None = Field(
+ None, alias="NamedResourceSpec"
+ )
+ DiscreteResourceSpec_: DiscreteResourceSpec | None = Field(
+ None, alias="DiscreteResourceSpec"
+ )
-class GenericResources(BaseModel):
+class GenericResources(RootModel):
"""
User-defined resources can be either Integer resources (e.g, `SSD=3`) or
String resources (e.g, `GPU=UUID1`).
"""
- __root__: list[GenericResource] = Field(
+ root: list[GenericResource] = Field(
...,
description="User-defined resources can be either Integer resources (e.g, `SSD=3`) or\nString resources (e.g, `GPU=UUID1`).\n",
- example=[
- {"DiscreteResourceSpec": {"Kind": "SSD", "Value": 3}},
- {"NamedResourceSpec": {"Kind": "GPU", "Value": "UUID1"}},
- {"NamedResourceSpec": {"Kind": "GPU", "Value": "UUID2"}},
+ examples=[
+ [
+ {"DiscreteResourceSpec": {"Kind": "SSD", "Value": 3}},
+ {"NamedResourceSpec": {"Kind": "GPU", "Value": "UUID1"}},
+ {"NamedResourceSpec": {"Kind": "GPU", "Value": "UUID2"}},
+ ]
],
)
@@ -508,17 +522,17 @@ class HealthcheckResult(BaseModel):
Start: datetime | None = Field(
None,
description="Date and time at which this check started in\n[RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format with nano-seconds.\n",
- example="2020-01-04T10:44:24.496525531Z",
+ examples=["2020-01-04T10:44:24.496525531Z"],
)
End: str | None = Field(
None,
description="Date and time at which this check ended in\n[RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format with nano-seconds.\n",
- example="2020-01-04T10:45:21.364524523Z",
+ examples=["2020-01-04T10:45:21.364524523Z"],
)
ExitCode: int | None = Field(
None,
description="ExitCode meanings:\n\n- `0` healthy\n- `1` unhealthy\n- `2` reserved (considered unhealthy)\n- other values: error running probe\n",
- example=0,
+ examples=[0],
)
Output: str | None = Field(None, description="Output from last check")
@@ -560,8 +574,8 @@ class CgroupnsMode(str, Enum):
host = "host"
-class ConsoleSizeItem(BaseModel):
- __root__: int = Field(..., ge=0)
+class ConsoleSizeItem(RootModel):
+ root: int = Field(..., ge=0)
class Isolation(str, Enum):
@@ -591,7 +605,7 @@ class ContainerConfig(BaseModel):
Hostname: str | None = Field(
None,
description="The hostname to use for the container, as a valid RFC 1123 hostname.\n",
- example="439f4e91bd1d",
+ examples=["439f4e91bd1d"],
)
Domainname: str | None = Field(
None, description="The domain name to use for the container.\n"
@@ -609,7 +623,7 @@ class ContainerConfig(BaseModel):
ExposedPorts: dict[str, dict[str, Any]] | None = Field(
None,
description='An object mapping ports to an empty object in the form:\n\n`{"/": {}}`\n',
- example={"80/tcp": {}, "443/tcp": {}},
+ examples=[{"80/tcp": {}, "443/tcp": {}}],
)
Tty: bool | None = Field(
False,
@@ -622,21 +636,23 @@ class ContainerConfig(BaseModel):
Env: list[str] | None = Field(
None,
description='A list of environment variables to set inside the container in the\nform `["VAR=value", ...]`. A variable without `=` is removed from the\nenvironment, rather than to have an empty value.\n',
- example=["PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"],
+ examples=[
+ ["PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"]
+ ],
)
Cmd: list[str] | None = Field(
None,
description="Command to run specified as a string or an array of strings.\n",
- example=["/bin/sh"],
+ examples=[["/bin/sh"]],
)
Healthcheck: HealthConfig | None = None
ArgsEscaped: bool | None = Field(
- False, description="Command is already escaped (Windows only)", example=False
+ False, description="Command is already escaped (Windows only)", examples=[False]
)
Image: str | None = Field(
None,
description="The name (or reference) of the image to use when creating the container,\nor which was used when the container was created.\n",
- example="example-image:1.0",
+ examples=["example-image:1.0"],
)
Volumes: dict[str, dict[str, Any]] | None = Field(
None,
@@ -645,12 +661,12 @@ class ContainerConfig(BaseModel):
WorkingDir: str | None = Field(
None,
description="The working directory for commands to run in.",
- example="/public/",
+ examples=["/public/"],
)
Entrypoint: list[str] | None = Field(
None,
description='The entry point for the container as a string or an array of strings.\n\nIf the array consists of exactly one empty string (`[""]`) then the\nentry point is reset to system default (i.e., the entry point used by\ndocker when there is no `ENTRYPOINT` instruction in the `Dockerfile`).\n',
- example=[],
+ examples=[[]],
)
NetworkDisabled: bool | None = Field(
None, description="Disable networking for the container."
@@ -659,20 +675,22 @@ class ContainerConfig(BaseModel):
OnBuild: list[str] | None = Field(
None,
description="`ONBUILD` metadata that were defined in the image's `Dockerfile`.\n",
- example=[],
+ examples=[[]],
)
Labels: dict[str, str] | None = Field(
None,
description="User-defined key/value metadata.",
- example={
- "com.example.some-label": "some-value",
- "com.example.some-other-label": "some-other-value",
- },
+ examples=[
+ {
+ "com.example.some-label": "some-value",
+ "com.example.some-other-label": "some-other-value",
+ }
+ ],
)
StopSignal: str | None = Field(
None,
description="Signal to stop a container as a string or unsigned integer.\n",
- example="SIGTERM",
+ examples=["SIGTERM"],
)
StopTimeout: int | None = Field(
10, description="Timeout to stop a container in seconds."
@@ -680,7 +698,7 @@ class ContainerConfig(BaseModel):
Shell: list[str] | None = Field(
None,
description="Shell for when `RUN`, `CMD`, and `ENTRYPOINT` uses a shell.\n",
- example=["/bin/sh", "-c"],
+ examples=[["/bin/sh", "-c"]],
)
@@ -704,8 +722,7 @@ class PortMap(BaseModel):
"""
- class Config:
- extra = Extra.allow
+ model_config = ConfigDict(extra="allow")
class PortBinding(BaseModel):
@@ -718,12 +735,12 @@ class PortBinding(BaseModel):
HostIp: str | None = Field(
None,
description="Host IP address that the container's port is mapped to.",
- example="127.0.0.1",
+ examples=["127.0.0.1"],
)
HostPort: str | None = Field(
None,
description="Host port number that the container's port is mapped to.",
- example="4443",
+ examples=["4443"],
)
@@ -735,16 +752,18 @@ class GraphDriverData(BaseModel):
"""
Name: str = Field(
- ..., description="Name of the storage driver.", example="overlay2"
+ ..., description="Name of the storage driver.", examples=["overlay2"]
)
Data: dict[str, str] = Field(
...,
description="Low-level storage metadata, provided as key/value pairs.\n\nThis information is driver-specific, and depends on the storage-driver\nin use, and should be used for informational purposes only.\n",
- example={
- "MergedDir": "/var/lib/docker/overlay2/ef749362d13333e65fc95c572eb525abbe0052e16e086cb64bc3b98ae9aa6d74/merged",
- "UpperDir": "/var/lib/docker/overlay2/ef749362d13333e65fc95c572eb525abbe0052e16e086cb64bc3b98ae9aa6d74/diff",
- "WorkDir": "/var/lib/docker/overlay2/ef749362d13333e65fc95c572eb525abbe0052e16e086cb64bc3b98ae9aa6d74/work",
- },
+ examples=[
+ {
+ "MergedDir": "/var/lib/docker/overlay2/ef749362d13333e65fc95c572eb525abbe0052e16e086cb64bc3b98ae9aa6d74/merged",
+ "UpperDir": "/var/lib/docker/overlay2/ef749362d13333e65fc95c572eb525abbe0052e16e086cb64bc3b98ae9aa6d74/diff",
+ "WorkDir": "/var/lib/docker/overlay2/ef749362d13333e65fc95c572eb525abbe0052e16e086cb64bc3b98ae9aa6d74/work",
+ }
+ ],
)
@@ -754,12 +773,14 @@ class RootFS(BaseModel):
"""
- Type: str = Field(..., example="layers")
+ Type: str = Field(..., examples=["layers"])
Layers: list[str] | None = Field(
None,
- example=[
- "sha256:1834950e52ce4d5a88a1bbd131c537f4d0e56d10ff0dd69e66be3b7dfa9df7e6",
- "sha256:5f70bf18a086007016e948b04aed3b82103a36bea41755b6cddfaf10ace3c6ef",
+ examples=[
+ [
+ "sha256:1834950e52ce4d5a88a1bbd131c537f4d0e56d10ff0dd69e66be3b7dfa9df7e6",
+ "sha256:5f70bf18a086007016e948b04aed3b82103a36bea41755b6cddfaf10ace3c6ef",
+ ]
],
)
@@ -774,7 +795,7 @@ class Metadata(BaseModel):
LastTagTime: str | None = Field(
None,
description="Date and time at which the image was last tagged in\n[RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format with nano-seconds.\n\nThis information is only available if the image was tagged locally,\nand omitted otherwise.\n",
- example="2022-02-28T14:40:02.623929178Z",
+ examples=["2022-02-28T14:40:02.623929178Z"],
)
@@ -787,95 +808,103 @@ class ImageInspect(BaseModel):
Id: str | None = Field(
None,
description="ID is the content-addressable ID of an image.\n\nThis identifier is a content-addressable digest calculated from the\nimage's configuration (which includes the digests of layers used by\nthe image).\n\nNote that this digest differs from the `RepoDigests` below, which\nholds digests of image manifests that reference the image.\n",
- example="sha256:ec3f0931a6e6b6855d76b2d7b0be30e81860baccd891b2e243280bf1cd8ad710",
+ examples=[
+ "sha256:ec3f0931a6e6b6855d76b2d7b0be30e81860baccd891b2e243280bf1cd8ad710"
+ ],
)
RepoTags: list[str] | None = Field(
None,
description='List of image names/tags in the local image cache that reference this\nimage.\n\nMultiple image tags can refer to the same imagem and this list may be\nempty if no tags reference the image, in which case the image is\n"untagged", in which case it can still be referenced by its ID.\n',
- example=[
- "example:1.0",
- "example:latest",
- "example:stable",
- "internal.registry.example.com:5000/example:1.0",
+ examples=[
+ [
+ "example:1.0",
+ "example:latest",
+ "example:stable",
+ "internal.registry.example.com:5000/example:1.0",
+ ]
],
)
RepoDigests: list[str] | None = Field(
None,
description="List of content-addressable digests of locally available image manifests\nthat the image is referenced from. Multiple manifests can refer to the\nsame image.\n\nThese digests are usually only available if the image was either pulled\nfrom a registry, or if the image was pushed to a registry, which is when\nthe manifest is generated and its digest calculated.\n",
- example=[
- "example@sha256:afcc7f1ac1b49db317a7196c902e61c6c3c4607d63599ee1a82d702d249a0ccb",
- "internal.registry.example.com:5000/example@sha256:b69959407d21e8a062e0416bf13405bb2b71ed7a84dde4158ebafacfa06f5578",
+ examples=[
+ [
+ "example@sha256:afcc7f1ac1b49db317a7196c902e61c6c3c4607d63599ee1a82d702d249a0ccb",
+ "internal.registry.example.com:5000/example@sha256:b69959407d21e8a062e0416bf13405bb2b71ed7a84dde4158ebafacfa06f5578",
+ ]
],
)
Parent: str | None = Field(
None,
description="ID of the parent image.\n\nDepending on how the image was created, this field may be empty and\nis only set for images that were built/created locally. This field\nis empty if the image was pulled from an image registry.\n",
- example="",
+ examples=[""],
)
Comment: str | None = Field(
None,
description="Optional message that was set when committing or importing the image.\n",
- example="",
+ examples=[""],
)
Created: str | None = Field(
None,
description="Date and time at which the image was created, formatted in\n[RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format with nano-seconds.\n",
- example="2022-02-04T21:20:12.497794809Z",
+ examples=["2022-02-04T21:20:12.497794809Z"],
)
Container: str | None = Field(
None,
description="The ID of the container that was used to create the image.\n\nDepending on how the image was created, this field may be empty.\n",
- example="65974bc86f1770ae4bff79f651ebdbce166ae9aada632ee3fa9af3a264911735",
+ examples=["65974bc86f1770ae4bff79f651ebdbce166ae9aada632ee3fa9af3a264911735"],
)
- ContainerConfig: ContainerConfig | None = None
+ ContainerConfig_: ContainerConfig | None = Field(None, alias="ContainerConfig")
DockerVersion: str | None = Field(
None,
description="The version of Docker that was used to build the image.\n\nDepending on how the image was created, this field may be empty.\n",
- example="20.10.7",
+ examples=["20.10.7"],
)
Author: str | None = Field(
None,
description="Name of the author that was specified when committing the image, or as\nspecified through MAINTAINER (deprecated) in the Dockerfile.\n",
- example="",
+ examples=[""],
)
- Config_: ContainerConfig | None = Field(None, alias="Config") # type: ignore
+ Config_: ContainerConfig | None = Field(None, alias="Config")
Architecture: str | None = Field(
None,
description="Hardware CPU architecture that the image runs on.\n",
- example="arm",
+ examples=["arm"],
)
Variant: str | None = Field(
None,
description="CPU architecture variant (presently ARM-only).\n",
- example="v7",
+ examples=["v7"],
)
Os: str | None = Field(
None,
description="Operating System the image is built to run on.\n",
- example="linux",
+ examples=["linux"],
)
OsVersion: str | None = Field(
None,
description="Operating System version the image is built to run on (especially\nfor Windows).\n",
- example="",
+ examples=[""],
)
Size: int | None = Field(
None,
description="Total size of the image including all layers it is composed of.\n",
- example=1239828,
+ examples=[1239828],
)
VirtualSize: int | None = Field(
None,
description="Total size of the image including all layers it is composed of.\n\nIn versions of Docker before v1.10, this field was calculated from\nthe image itself and all of its parent images. Docker v1.10 and up\nstore images self-contained, and no longer use a parent-chain, making\nthis field an equivalent of the Size field.\n\nThis field is kept for backward compatibility, but may be removed in\na future version of the API.\n",
- example=1239828,
+ examples=[1239828],
)
GraphDriver: GraphDriverData | None = None
- RootFS: RootFS | None = Field(
+ RootFS_: RootFS | None = Field(
None,
+ alias="RootFS",
description="Information about the image's RootFS, including the layer IDs.\n",
)
- Metadata: Metadata | None = Field(
+ Metadata_: Metadata | None = Field(
None,
+ alias="Metadata",
description="Additional metadata of the image in the local cache. This information\nis local to the daemon, and not part of the image itself.\n",
)
@@ -884,63 +913,71 @@ class ImageSummary(BaseModel):
Id: str = Field(
...,
description="ID is the content-addressable ID of an image.\n\nThis identifier is a content-addressable digest calculated from the\nimage's configuration (which includes the digests of layers used by\nthe image).\n\nNote that this digest differs from the `RepoDigests` below, which\nholds digests of image manifests that reference the image.\n",
- example="sha256:ec3f0931a6e6b6855d76b2d7b0be30e81860baccd891b2e243280bf1cd8ad710",
+ examples=[
+ "sha256:ec3f0931a6e6b6855d76b2d7b0be30e81860baccd891b2e243280bf1cd8ad710"
+ ],
)
ParentId: str = Field(
...,
description="ID of the parent image.\n\nDepending on how the image was created, this field may be empty and\nis only set for images that were built/created locally. This field\nis empty if the image was pulled from an image registry.\n",
- example="",
+ examples=[""],
)
RepoTags: list[str] = Field(
...,
description='List of image names/tags in the local image cache that reference this\nimage.\n\nMultiple image tags can refer to the same image and this list may be\nempty if no tags reference the image, in which case the image is\n"untagged", in which case it can still be referenced by its ID.\n',
- example=[
- "example:1.0",
- "example:latest",
- "example:stable",
- "internal.registry.example.com:5000/example:1.0",
+ examples=[
+ [
+ "example:1.0",
+ "example:latest",
+ "example:stable",
+ "internal.registry.example.com:5000/example:1.0",
+ ]
],
)
RepoDigests: list[str] = Field(
...,
description="List of content-addressable digests of locally available image manifests\nthat the image is referenced from. Multiple manifests can refer to the\nsame image.\n\nThese digests are usually only available if the image was either pulled\nfrom a registry, or if the image was pushed to a registry, which is when\nthe manifest is generated and its digest calculated.\n",
- example=[
- "example@sha256:afcc7f1ac1b49db317a7196c902e61c6c3c4607d63599ee1a82d702d249a0ccb",
- "internal.registry.example.com:5000/example@sha256:b69959407d21e8a062e0416bf13405bb2b71ed7a84dde4158ebafacfa06f5578",
+ examples=[
+ [
+ "example@sha256:afcc7f1ac1b49db317a7196c902e61c6c3c4607d63599ee1a82d702d249a0ccb",
+ "internal.registry.example.com:5000/example@sha256:b69959407d21e8a062e0416bf13405bb2b71ed7a84dde4158ebafacfa06f5578",
+ ]
],
)
Created: int = Field(
...,
description="Date and time at which the image was created as a Unix timestamp\n(number of seconds sinds EPOCH).\n",
- example="1644009612",
+ examples=["1644009612"],
)
Size: int = Field(
...,
description="Total size of the image including all layers it is composed of.\n",
- example=172064416,
+ examples=[172064416],
)
SharedSize: int = Field(
...,
description="Total size of image layers that are shared between this image and other\nimages.\n\nThis size is not calculated by default. `-1` indicates that the value\nhas not been set / calculated.\n",
- example=1239828,
+ examples=[1239828],
)
VirtualSize: int = Field(
...,
description="Total size of the image including all layers it is composed of.\n\nIn versions of Docker before v1.10, this field was calculated from\nthe image itself and all of its parent images. Docker v1.10 and up\nstore images self-contained, and no longer use a parent-chain, making\nthis field an equivalent of the Size field.\n\nThis field is kept for backward compatibility, but may be removed in\na future version of the API.\n",
- example=172064416,
+ examples=[172064416],
)
Labels: dict[str, str] = Field(
...,
description="User-defined key/value metadata.",
- example={
- "com.example.some-label": "some-value",
- "com.example.some-other-label": "some-other-value",
- },
+ examples=[
+ {
+ "com.example.some-label": "some-value",
+ "com.example.some-other-label": "some-other-value",
+ }
+ ],
)
Containers: int = Field(
...,
description="Number of containers using this image. Includes both stopped and running\ncontainers.\n\nThis size is not calculated by default, and depends on which API endpoint\nis used. `-1` indicates that the value has not been set / calculated.\n",
- example=2,
+ examples=[2],
)
@@ -988,47 +1025,51 @@ class UsageData(BaseModel):
class Volume(BaseModel):
- Name: str = Field(..., description="Name of the volume.", example="tardis")
+ Name: str = Field(..., description="Name of the volume.", examples=["tardis"])
Driver: str = Field(
...,
description="Name of the volume driver used by the volume.",
- example="custom",
+ examples=["custom"],
)
Mountpoint: str = Field(
...,
description="Mount path of the volume on the host.",
- example="/var/lib/docker/volumes/tardis",
+ examples=["/var/lib/docker/volumes/tardis"],
)
CreatedAt: str | None = Field(
None,
description="Date/Time the volume was created.",
- example="2016-06-07T20:31:11.853781916Z",
+ examples=["2016-06-07T20:31:11.853781916Z"],
)
Status: dict[str, dict[str, Any]] | None = Field(
None,
description='Low-level details about the volume, provided by the volume driver.\nDetails are returned as a map with key/value pairs:\n`{"key":"value","key2":"value2"}`.\n\nThe `Status` field is optional, and is omitted if the volume driver\ndoes not support this feature.\n',
- example={"hello": "world"},
+ examples=[{"hello": "world"}],
)
Labels: dict[str, str] = Field(
...,
description="User-defined key/value metadata.",
- example={
- "com.example.some-label": "some-value",
- "com.example.some-other-label": "some-other-value",
- },
+ examples=[
+ {
+ "com.example.some-label": "some-value",
+ "com.example.some-other-label": "some-other-value",
+ }
+ ],
)
- Scope: Scope = Field(
+ Scope_: Scope = Field(
...,
+ alias="Scope",
description="The level at which the volume exists. Either `global` for cluster-wide,\nor `local` for machine level.\n",
- example="local",
+ examples=["local"],
)
Options: dict[str, str] = Field(
...,
description="The driver specific options used when creating the volume.\n",
- example={"device": "tmpfs", "o": "size=100m,uid=1000", "type": "tmpfs"},
+ examples=[{"device": "tmpfs", "o": "size=100m,uid=1000", "type": "tmpfs"}],
)
- UsageData: UsageData | None = Field(
+ UsageData_: UsageData | None = Field(
None,
+ alias="UsageData",
description="Usage details about the volume. This information is used by the\n`GET /system/df` endpoint, and omitted in other endpoints.\n",
)
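Fields whose names would shadow the type used in their own annotation (for example `Scope: Scope` or `UsageData: UsageData`) are renamed with a trailing underscore and given an `alias`, so the Docker API key is preserved on the wire. A sketch of how such a field validates and serializes by alias, assuming pydantic>=2; enabling `populate_by_name` here is an illustrative assumption, since the actual `model_config` of the generated models is not shown in this hunk:

# Sketch only: the trailing underscore avoids shadowing the annotation type,
# while alias="Scope" keeps the original Docker API key.
from enum import Enum

from pydantic import BaseModel, ConfigDict, Field


class Scope(str, Enum):
    local = "local"
    global_ = "global"


class VolumeSketch(BaseModel):
    model_config = ConfigDict(populate_by_name=True)  # assumption: also accept the field name

    Name: str
    Scope_: Scope = Field(..., alias="Scope")


v = VolumeSketch.model_validate({"Name": "tardis", "Scope": "local"})
assert v.Scope_ is Scope.local
assert v.model_dump(mode="json", by_alias=True)["Scope"] == "local"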
@@ -1041,23 +1082,25 @@ class VolumeConfig(BaseModel):
Name: str | None = Field(
None,
description="The new volume's name. If not specified, Docker generates a name.\n",
- example="tardis",
+ examples=["tardis"],
)
Driver: str | None = Field(
- "local", description="Name of the volume driver to use.", example="custom"
+ "local", description="Name of the volume driver to use.", examples=["custom"]
)
DriverOpts: dict[str, str] | None = Field(
None,
description="A mapping of driver options and values. These options are\npassed directly to the driver and are driver specific.\n",
- example={"device": "tmpfs", "o": "size=100m,uid=1000", "type": "tmpfs"},
+ examples=[{"device": "tmpfs", "o": "size=100m,uid=1000", "type": "tmpfs"}],
)
Labels: dict[str, str] | None = Field(
None,
description="User-defined key/value metadata.",
- example={
- "com.example.some-label": "some-value",
- "com.example.some-other-label": "some-other-value",
- },
+ examples=[
+ {
+ "com.example.some-label": "some-value",
+ "com.example.some-other-label": "some-other-value",
+ }
+ ],
)
@@ -1099,43 +1142,45 @@ class BuildCache(BaseModel):
ID: str | None = Field(
None,
description="Unique ID of the build cache record.\n",
- example="ndlpt0hhvkqcdfkputsk4cq9c",
+ examples=["ndlpt0hhvkqcdfkputsk4cq9c"],
)
Parent: str | None = Field(
None,
description="ID of the parent build cache record.\n",
- example="hw53o5aio51xtltp5xjp8v7fx",
+ examples=["hw53o5aio51xtltp5xjp8v7fx"],
)
Type: Type4 | None = Field(
- None, description="Cache record type.\n", example="regular"
+ None, description="Cache record type.\n", examples=["regular"]
)
Description: str | None = Field(
None,
description="Description of the build-step that produced the build cache.\n",
- example="mount / from exec /bin/sh -c echo 'Binary::apt::APT::Keep-Downloaded-Packages \"true\";' > /etc/apt/apt.conf.d/keep-cache",
+ examples=[
+ "mount / from exec /bin/sh -c echo 'Binary::apt::APT::Keep-Downloaded-Packages \"true\";' > /etc/apt/apt.conf.d/keep-cache"
+ ],
)
InUse: bool | None = Field(
- None, description="Indicates if the build cache is in use.\n", example=False
+ None, description="Indicates if the build cache is in use.\n", examples=[False]
)
Shared: bool | None = Field(
- None, description="Indicates if the build cache is shared.\n", example=True
+ None, description="Indicates if the build cache is shared.\n", examples=[True]
)
Size: int | None = Field(
None,
description="Amount of disk space used by the build cache (in bytes).\n",
- example=51,
+ examples=[51],
)
CreatedAt: str | None = Field(
None,
description="Date and time at which the build cache was created in\n[RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format with nano-seconds.\n",
- example="2016-08-18T10:44:24.496525531Z",
+ examples=["2016-08-18T10:44:24.496525531Z"],
)
LastUsedAt: str | None = Field(
None,
description="Date and time at which the build cache was last used in\n[RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format with nano-seconds.\n",
- example="2017-08-09T07:09:37.632105588Z",
+ examples=["2017-08-09T07:09:37.632105588Z"],
)
- UsageCount: int | None = Field(None, example=26)
+ UsageCount: int | None = Field(None, examples=[26])
class ImageID(BaseModel):
@@ -1178,28 +1223,30 @@ class EndpointIPAMConfig(BaseModel):
"""
- IPv4Address: str | None = Field(None, example="172.20.30.33")
- IPv6Address: str | None = Field(None, example="2001:db8:abcd::3033")
+ IPv4Address: str | None = Field(None, examples=["172.20.30.33"])
+ IPv6Address: str | None = Field(None, examples=["2001:db8:abcd::3033"])
LinkLocalIPs: list[str] | None = Field(
- None, example=["169.254.34.68", "fe80::3468"]
+ None, examples=[["169.254.34.68", "fe80::3468"]]
)
class PluginMount(BaseModel):
- Name: str = Field(..., example="some-mount")
- Description: str = Field(..., example="This is a mount that's used by the plugin.")
+ Name: str = Field(..., examples=["some-mount"])
+ Description: str = Field(
+ ..., examples=["This is a mount that's used by the plugin."]
+ )
Settable: list[str]
- Source: str = Field(..., example="/var/lib/docker/plugins/")
- Destination: str = Field(..., example="/mnt/state")
- Type: str = Field(..., example="bind")
- Options: list[str] = Field(..., example=["rbind", "rw"])
+ Source: str = Field(..., examples=["/var/lib/docker/plugins/"])
+ Destination: str = Field(..., examples=["/mnt/state"])
+ Type: str = Field(..., examples=["bind"])
+ Options: list[str] = Field(..., examples=[["rbind", "rw"]])
class PluginDevice(BaseModel):
Name: str
Description: str
Settable: list[str]
- Path: str = Field(..., example="/dev/fuse")
+ Path: str = Field(..., examples=["/dev/fuse"])
class PluginEnv(BaseModel):
@@ -1222,9 +1269,9 @@ class PluginPrivilege(BaseModel):
"""
- Name: str | None = Field(None, example="network")
+ Name: str | None = Field(None, examples=["network"])
Description: str | None = None
- Value: list[str] | None = Field(None, example=["host"])
+ Value: list[str] | None = Field(None, examples=[["host"]])
class Settings(BaseModel):
@@ -1233,7 +1280,7 @@ class Settings(BaseModel):
"""
Mounts: list[PluginMount]
- Env: list[str] = Field(..., example=["DEBUG=0"])
+ Env: list[str] = Field(..., examples=[["DEBUG=0"]])
Args: list[str]
Devices: list[PluginDevice]
@@ -1252,44 +1299,49 @@ class Interface(BaseModel):
The interface between Docker and the plugin
"""
- Types: list[PluginInterfaceType] = Field(..., example=["docker.volumedriver/1.0"])
- Socket: str = Field(..., example="plugins.sock")
- ProtocolScheme: ProtocolScheme | None = Field(
+ Types: list[PluginInterfaceType] = Field(
+ ..., examples=[["docker.volumedriver/1.0"]]
+ )
+ Socket: str = Field(..., examples=["plugins.sock"])
+ ProtocolScheme_: ProtocolScheme | None = Field(
None,
+ alias="ProtocolScheme",
description="Protocol to use for clients connecting to the plugin.",
- example="some.protocol/v1.0",
+ examples=["some.protocol/v1.0"],
)
class User(BaseModel):
- UID: int | None = Field(None, example=1000)
- GID: int | None = Field(None, example=1000)
+ UID: int | None = Field(None, examples=[1000])
+ GID: int | None = Field(None, examples=[1000])
class Network1(BaseModel):
- Type: str = Field(..., example="host")
+ Type: str = Field(..., examples=["host"])
class Linux(BaseModel):
- Capabilities: list[str] = Field(..., example=["CAP_SYS_ADMIN", "CAP_SYSLOG"])
- AllowAllDevices: bool = Field(..., example=False)
+ Capabilities: list[str] = Field(..., examples=[["CAP_SYS_ADMIN", "CAP_SYSLOG"]])
+ AllowAllDevices: bool = Field(..., examples=[False])
Devices: list[PluginDevice]
class Args(BaseModel):
- Name: str = Field(..., example="args")
- Description: str = Field(..., example="command line arguments")
+ Name: str = Field(..., examples=["args"])
+ Description: str = Field(..., examples=["command line arguments"])
Settable: list[str]
Value: list[str]
class Rootfs(BaseModel):
- type: str | None = Field(None, example="layers")
+ type: str | None = Field(None, examples=["layers"])
diff_ids: list[str] | None = Field(
None,
- example=[
- "sha256:675532206fbf3030b8458f88d6e26d4eb1577688a25efec97154c94e8b6b4887",
- "sha256:e216a057b1cb1efc11f8a268f37ef62083e70b1b38323ba252e25ac88904a7e8",
+ examples=[
+ [
+ "sha256:675532206fbf3030b8458f88d6e26d4eb1577688a25efec97154c94e8b6b4887",
+ "sha256:e216a057b1cb1efc11f8a268f37ef62083e70b1b38323ba252e25ac88904a7e8",
+ ]
],
)
@@ -1302,33 +1354,37 @@ class Config(BaseModel):
DockerVersion: str | None = Field(
None,
description="Docker Version used to create the plugin",
- example="17.06.0-ce",
+ examples=["17.06.0-ce"],
)
- Description: str = Field(..., example="A sample volume plugin for Docker")
- Documentation: str = Field(..., example="/engine/extend/plugins/")
- Interface: Interface = Field(
- ..., description="The interface between Docker and the plugin"
+ Description: str = Field(..., examples=["A sample volume plugin for Docker"])
+ Documentation: str = Field(..., examples=["/engine/extend/plugins/"])
+ Interface_: Interface = Field(
+ ...,
+ alias="Interface",
+ description="The interface between Docker and the plugin",
)
Entrypoint: list[str] = Field(
- ..., example=["/usr/bin/sample-volume-plugin", "/data"]
+ ..., examples=[["/usr/bin/sample-volume-plugin", "/data"]]
)
- WorkDir: str = Field(..., example="/bin/")
- User: User | None = None
+ WorkDir: str = Field(..., examples=["/bin/"])
+ User_: User | None = Field(None, alias="User")
Network: Network1
Linux: Linux
- PropagatedMount: str = Field(..., example="/mnt/volumes")
- IpcHost: bool = Field(..., example=False)
- PidHost: bool = Field(..., example=False)
+ PropagatedMount: str = Field(..., examples=["/mnt/volumes"])
+ IpcHost: bool = Field(..., examples=[False])
+ PidHost: bool = Field(..., examples=[False])
Mounts: list[PluginMount]
Env: list[PluginEnv] = Field(
...,
- example=[
- {
- "Name": "DEBUG",
- "Description": "If set, prints debug messages",
- "Settable": None,
- "Value": "0",
- }
+ examples=[
+ [
+ {
+ "Name": "DEBUG",
+ "Description": "If set, prints debug messages",
+ "Settable": None,
+ "Value": "0",
+ }
+ ]
],
)
Args: Args
@@ -1341,21 +1397,22 @@ class Plugin(BaseModel):
"""
Id: str | None = Field(
- None, example="5724e2c8652da337ab2eedd19fc6fc0ec908e4bd907c7421bf6a8dfc70c4c078"
+ None,
+ examples=["5724e2c8652da337ab2eedd19fc6fc0ec908e4bd907c7421bf6a8dfc70c4c078"],
)
- Name: str = Field(..., example="tiborvass/sample-volume-plugin")
+ Name: str = Field(..., examples=["tiborvass/sample-volume-plugin"])
Enabled: bool = Field(
...,
description="True if the plugin is running. False if the plugin is not running, only installed.",
- example=True,
+ examples=[True],
)
- Settings: Settings = Field(
- ..., description="Settings that can be modified by users."
+ Settings_: Settings = Field(
+ ..., alias="Settings", description="Settings that can be modified by users."
)
PluginReference: str | None = Field(
None,
description="plugin remote reference used to push/pull the plugin",
- example="localhost:5000/tiborvass/sample-volume-plugin:latest",
+ examples=["localhost:5000/tiborvass/sample-volume-plugin:latest"],
)
Config_: Config = Field(..., alias="Config", description="The config of a plugin.")
@@ -1375,7 +1432,7 @@ class ObjectVersion(BaseModel):
"""
- Index: int | None = Field(None, example=373531)
+ Index: int | None = Field(None, examples=[373531])
class Role(str, Enum):
@@ -1398,13 +1455,20 @@ class Availability(str, Enum):
class NodeSpec(BaseModel):
- Name: str | None = Field(None, description="Name for the node.", example="my-node")
+ Name: str | None = Field(
+ None, description="Name for the node.", examples=["my-node"]
+ )
Labels: dict[str, str] | None = Field(
None, description="User-defined key/value metadata."
)
- Role: Role | None = Field(None, description="Role of the node.", example="manager")
- Availability: Availability | None = Field(
- None, description="Availability of the node.", example="active"
+ Role_: Role | None = Field(
+ None, alias="Role", description="Role of the node.", examples=["manager"]
+ )
+ Availability_: Availability | None = Field(
+ None,
+ alias="Availability",
+ description="Availability of the node.",
+ examples=["active"],
)
@@ -1417,12 +1481,12 @@ class Platform(BaseModel):
Architecture: str | None = Field(
None,
description="Architecture represents the hardware architecture (for example,\n`x86_64`).\n",
- example="x86_64",
+ examples=["x86_64"],
)
OS: str | None = Field(
None,
description="OS represents the Operating System (for example, `linux` or `windows`).\n",
- example="linux",
+ examples=["linux"],
)
@@ -1436,29 +1500,31 @@ class EngineDescription(BaseModel):
EngineDescription provides information about an engine.
"""
- EngineVersion: str | None = Field(None, example="17.06.0")
- Labels: dict[str, str] | None = Field(None, example={"foo": "bar"})
+ EngineVersion: str | None = Field(None, examples=["17.06.0"])
+ Labels: dict[str, str] | None = Field(None, examples=[{"foo": "bar"}])
Plugins: list[Plugin1] | None = Field(
None,
- example=[
- {"Type": "Log", "Name": "awslogs"},
- {"Type": "Log", "Name": "fluentd"},
- {"Type": "Log", "Name": "gcplogs"},
- {"Type": "Log", "Name": "gelf"},
- {"Type": "Log", "Name": "journald"},
- {"Type": "Log", "Name": "json-file"},
- {"Type": "Log", "Name": "logentries"},
- {"Type": "Log", "Name": "splunk"},
- {"Type": "Log", "Name": "syslog"},
- {"Type": "Network", "Name": "bridge"},
- {"Type": "Network", "Name": "host"},
- {"Type": "Network", "Name": "ipvlan"},
- {"Type": "Network", "Name": "macvlan"},
- {"Type": "Network", "Name": "null"},
- {"Type": "Network", "Name": "overlay"},
- {"Type": "Volume", "Name": "local"},
- {"Type": "Volume", "Name": "localhost:5000/vieux/sshfs:latest"},
- {"Type": "Volume", "Name": "vieux/sshfs:latest"},
+ examples=[
+ [
+ {"Type": "Log", "Name": "awslogs"},
+ {"Type": "Log", "Name": "fluentd"},
+ {"Type": "Log", "Name": "gcplogs"},
+ {"Type": "Log", "Name": "gelf"},
+ {"Type": "Log", "Name": "journald"},
+ {"Type": "Log", "Name": "json-file"},
+ {"Type": "Log", "Name": "logentries"},
+ {"Type": "Log", "Name": "splunk"},
+ {"Type": "Log", "Name": "syslog"},
+ {"Type": "Network", "Name": "bridge"},
+ {"Type": "Network", "Name": "host"},
+ {"Type": "Network", "Name": "ipvlan"},
+ {"Type": "Network", "Name": "macvlan"},
+ {"Type": "Network", "Name": "null"},
+ {"Type": "Network", "Name": "overlay"},
+ {"Type": "Volume", "Name": "local"},
+ {"Type": "Volume", "Name": "localhost:5000/vieux/sshfs:latest"},
+ {"Type": "Volume", "Name": "vieux/sshfs:latest"},
+ ]
],
)
@@ -1512,7 +1578,7 @@ class Orchestration(BaseModel):
TaskHistoryRetentionLimit: int | None = Field(
None,
description="The number of historic tasks to keep per instance or node. If\nnegative, never remove completed or failed tasks.\n",
- example=10,
+ examples=[10],
)
@@ -1522,7 +1588,9 @@ class Raft(BaseModel):
"""
SnapshotInterval: int | None = Field(
- None, description="The number of log entries between snapshots.", example=10000
+ None,
+ description="The number of log entries between snapshots.",
+ examples=[10000],
)
KeepOldSnapshots: int | None = Field(
None,
@@ -1531,17 +1599,17 @@ class Raft(BaseModel):
LogEntriesForSlowFollowers: int | None = Field(
None,
description="The number of log entries to keep around to sync up slow followers\nafter a snapshot is created.\n",
- example=500,
+ examples=[500],
)
ElectionTick: int | None = Field(
None,
description="The number of ticks that a follower will wait for a message from\nthe leader before becoming a candidate and starting an election.\n`ElectionTick` must be greater than `HeartbeatTick`.\n\nA tick currently defaults to one second, so these translate\ndirectly to seconds currently, but this is NOT guaranteed.\n",
- example=3,
+ examples=[3],
)
HeartbeatTick: int | None = Field(
None,
description="The number of ticks between heartbeats. Every HeartbeatTick ticks,\nthe leader will send a heartbeat to the followers.\n\nA tick currently defaults to one second, so these translate\ndirectly to seconds currently, but this is NOT guaranteed.\n",
- example=1,
+ examples=[1],
)
@@ -1553,7 +1621,7 @@ class Dispatcher(BaseModel):
HeartbeatPeriod: int | None = Field(
None,
description="The delay for an agent to send a heartbeat to the dispatcher.\n",
- example=5000000000,
+ examples=[5000000000],
)
@@ -1568,8 +1636,9 @@ class Protocol(str, Enum):
class ExternalCA(BaseModel):
- Protocol: Protocol | None = Field(
+ Protocol_: Protocol | None = Field(
Protocol.cfssl,
+ alias="Protocol",
description="Protocol for communication with the external CA (currently\nonly `cfssl` is supported).\n",
)
URL: str | None = Field(
@@ -1593,7 +1662,7 @@ class CAConfig(BaseModel):
NodeCertExpiry: int | None = Field(
None,
description="The duration node certificates are issued for.",
- example=7776000000000000,
+ examples=[7776000000000000],
)
ExternalCAs: list[ExternalCA] | None = Field(
None,
@@ -1621,7 +1690,7 @@ class EncryptionConfig(BaseModel):
AutoLockManagers: bool | None = Field(
None,
description="If set, generate a key and use it to lock data stored on the\nmanagers.\n",
- example=False,
+ examples=[False],
)
@@ -1638,12 +1707,12 @@ class LogDriver(BaseModel):
Name: str | None = Field(
None,
description="The log driver to use as a default for new tasks.\n",
- example="json-file",
+ examples=["json-file"],
)
Options: dict[str, str] | None = Field(
None,
description="Driver-specific options for the selectd log driver, specified\nas key/value pairs.\n",
- example={"max-file": "10", "max-size": "100m"},
+ examples=[{"max-file": "10", "max-size": "100m"}],
)
@@ -1652,8 +1721,9 @@ class TaskDefaults(BaseModel):
Defaults for creating tasks in this cluster.
"""
- LogDriver: LogDriver | None = Field(
+ LogDriver_: LogDriver | None = Field(
None,
+ alias="LogDriver",
description="The log driver to use for tasks created in the orchestrator if\nunspecified by a service.\n\nUpdating this value only affects new tasks. Existing tasks continue\nto use their previously configured log driver until recreated.\n",
)
@@ -1663,26 +1733,38 @@ class SwarmSpec(BaseModel):
User modifiable swarm configuration.
"""
- Name: str | None = Field(None, description="Name of the swarm.", example="default")
+ Name: str | None = Field(
+ None, description="Name of the swarm.", examples=["default"]
+ )
Labels: dict[str, str] | None = Field(
None,
description="User-defined key/value metadata.",
- example={
- "com.example.corp.type": "production",
- "com.example.corp.department": "engineering",
- },
+ examples=[
+ {
+ "com.example.corp.type": "production",
+ "com.example.corp.department": "engineering",
+ }
+ ],
+ )
+ Orchestration_: Orchestration | None = Field(
+ None, alias="Orchestration", description="Orchestration configuration."
)
- Orchestration: Orchestration | None = Field(
- None, description="Orchestration configuration."
+ Raft_: Raft | None = Field(None, alias="Raft", description="Raft configuration.")
+ Dispatcher_: Dispatcher | None = Field(
+ None, alias="Dispatcher", description="Dispatcher configuration."
)
- Raft: Raft | None = Field(None, description="Raft configuration.")
- Dispatcher: Dispatcher | None = Field(None, description="Dispatcher configuration.")
- CAConfig: CAConfig | None = Field(None, description="CA configuration.")
- EncryptionConfig: EncryptionConfig | None = Field(
- None, description="Parameters related to encryption-at-rest."
+ CAConfig_: CAConfig | None = Field(
+ None, alias="CAConfig", description="CA configuration."
)
- TaskDefaults: TaskDefaults | None = Field(
- None, description="Defaults for creating tasks in this cluster."
+ EncryptionConfig_: EncryptionConfig | None = Field(
+ None,
+ alias="EncryptionConfig",
+ description="Parameters related to encryption-at-rest.",
+ )
+ TaskDefaults_: TaskDefaults | None = Field(
+ None,
+ alias="TaskDefaults",
+ description="Defaults for creating tasks in this cluster.",
)
@@ -1694,30 +1776,30 @@ class ClusterInfo(BaseModel):
"""
ID: str | None = Field(
- None, description="The ID of the swarm.", example="abajmipo7b4xz5ip2nrla6b11"
+ None, description="The ID of the swarm.", examples=["abajmipo7b4xz5ip2nrla6b11"]
)
Version: ObjectVersion | None = None
CreatedAt: str | None = Field(
None,
description="Date and time at which the swarm was initialised in\n[RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format with nano-seconds.\n",
- example="2016-08-18T10:44:24.496525531Z",
+ examples=["2016-08-18T10:44:24.496525531Z"],
)
UpdatedAt: str | None = Field(
None,
description="Date and time at which the swarm was last updated in\n[RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format with nano-seconds.\n",
- example="2017-08-09T07:09:37.632105588Z",
+ examples=["2017-08-09T07:09:37.632105588Z"],
)
Spec: SwarmSpec | None = None
- TLSInfo: TLSInfo | None = None
+ TLSInfo_: TLSInfo | None = Field(None, alias="TLSInfo")
RootRotationInProgress: bool | None = Field(
None,
description="Whether there is currently a root CA rotation in progress for the swarm\n",
- example=False,
+ examples=[False],
)
DataPathPort: int | None = Field(
4789,
description="DataPathPort specifies the data path port number for data traffic.\nAcceptable port range is 1024 to 49151.\nIf no port is set or is set to 0, the default port (4789) is used.\n",
- example=4789,
+ examples=[4789],
)
DefaultAddrPool: list[str] | None = Field(
None,
@@ -1726,7 +1808,7 @@ class ClusterInfo(BaseModel):
SubnetSize: int | None = Field(
24,
description="SubnetSize specifies the subnet size of the networks created from the\ndefault subnet pool.\n",
- example=24,
+ examples=[24],
le=29,
)
@@ -1740,17 +1822,21 @@ class JoinTokens(BaseModel):
Worker: str | None = Field(
None,
description="The token workers can use to join the swarm.\n",
- example="SWMTKN-1-3pu6hszjas19xyp7ghgosyx9k8atbfcr8p2is99znpy26u2lkl-1awxwuwd3z9j1z3puu7rcgdbx",
+ examples=[
+ "SWMTKN-1-3pu6hszjas19xyp7ghgosyx9k8atbfcr8p2is99znpy26u2lkl-1awxwuwd3z9j1z3puu7rcgdbx"
+ ],
)
Manager: str | None = Field(
None,
description="The token managers can use to join the swarm.\n",
- example="SWMTKN-1-3pu6hszjas19xyp7ghgosyx9k8atbfcr8p2is99znpy26u2lkl-7p73s1dx5in4tatdymyhg9hu2",
+ examples=[
+ "SWMTKN-1-3pu6hszjas19xyp7ghgosyx9k8atbfcr8p2is99znpy26u2lkl-7p73s1dx5in4tatdymyhg9hu2"
+ ],
)
class Swarm(ClusterInfo):
- JoinTokens: JoinTokens | None = None
+ JoinTokens_: JoinTokens | None = Field(None, alias="JoinTokens")
class PluginSpec(BaseModel):
@@ -1785,12 +1871,12 @@ class CredentialSpec(BaseModel):
None,
alias="Config",
description="Load credential spec from a Swarm Config with the given ID.\nThe specified config must also be present in the Configs\nfield with the Runtime property set.\n\n
\n\n\n> **Note**: `CredentialSpec.File`, `CredentialSpec.Registry`,\n> and `CredentialSpec.Config` are mutually exclusive.\n",
- example="0bt9dmxjvjiqermk6xrop3ekq",
+ examples=["0bt9dmxjvjiqermk6xrop3ekq"],
)
File: str | None = Field(
None,
description="Load credential spec from this file. The file is read by\nthe daemon, and must be present in the `CredentialSpecs`\nsubdirectory in the docker data directory, which defaults\nto `C:\\ProgramData\\Docker\\` on Windows.\n\nFor example, specifying `spec.json` loads\n`C:\\ProgramData\\Docker\\CredentialSpecs\\spec.json`.\n\n
\n\n> **Note**: `CredentialSpec.File`, `CredentialSpec.Registry`,\n> and `CredentialSpec.Config` are mutually exclusive.\n",
- example="spec.json",
+ examples=["spec.json"],
)
Registry: str | None = Field(
None,
@@ -1815,11 +1901,13 @@ class Privileges(BaseModel):
Security options for the container
"""
- CredentialSpec: CredentialSpec | None = Field(
- None, description="CredentialSpec for managed service account (Windows only)"
+ CredentialSpec_: CredentialSpec | None = Field(
+ None,
+ alias="CredentialSpec",
+ description="CredentialSpec for managed service account (Windows only)",
)
- SELinuxContext: SELinuxContext | None = Field(
- None, description="SELinux labels of the container"
+ SELinuxContext_: SELinuxContext | None = Field(
+ None, alias="SELinuxContext", description="SELinux labels of the container"
)
@@ -1859,8 +1947,9 @@ class File(BaseModel):
class Secret(BaseModel):
- File: File | None = Field(
+ File_: File | None = Field(
None,
+ alias="File",
description="File represents a specific target that is backed by a file.\n",
)
SecretID: str | None = Field(
@@ -1945,8 +2034,8 @@ class ContainerSpec(BaseModel):
None,
description="A list of additional groups that the container process will run as.\n",
)
- Privileges: Privileges | None = Field(
- None, description="Security options for the container"
+ Privileges_: Privileges | None = Field(
+ None, alias="Privileges", description="Security options for the container"
)
TTY: bool | None = Field(
None, description="Whether a pseudo-TTY should be allocated."
@@ -1969,8 +2058,9 @@ class ContainerSpec(BaseModel):
None,
description="A list of hostname/IP mappings to add to the container's `hosts`\nfile. The format of extra hosts is specified in the\n[hosts(5)](http://man7.org/linux/man-pages/man5/hosts.5.html)\nman page:\n\n IP_address canonical_hostname [aliases...]\n",
)
- DNSConfig: DNSConfig | None = Field(
+ DNSConfig_: DNSConfig | None = Field(
None,
+ alias="DNSConfig",
description="Specification for DNS related configurations in resolver configuration\nfile (`resolv.conf`).\n",
)
Secrets: list[Secret] | None = Field(
@@ -1981,8 +2071,9 @@ class ContainerSpec(BaseModel):
None,
description="Configs contains references to zero or more configs that will be\nexposed to the service.\n",
)
- Isolation: Isolation | None = Field(
+ Isolation_: Isolation | None = Field(
None,
+ alias="Isolation",
description="Isolation technology of the containers running the service.\n(Windows only)\n",
)
Init: bool | None = Field(
@@ -1996,12 +2087,12 @@ class ContainerSpec(BaseModel):
CapabilityAdd: list[str] | None = Field(
None,
description="A list of kernel capabilities to add to the default set\nfor the container.\n",
- example=["CAP_NET_RAW", "CAP_SYS_ADMIN", "CAP_SYS_CHROOT", "CAP_SYSLOG"],
+ examples=[["CAP_NET_RAW", "CAP_SYS_ADMIN", "CAP_SYS_CHROOT", "CAP_SYSLOG"]],
)
CapabilityDrop: list[str] | None = Field(
None,
description="A list of kernel capabilities to drop from the default set\nfor the container.\n",
- example=["CAP_NET_RAW"],
+ examples=[["CAP_NET_RAW"]],
)
Ulimits: list[Ulimit1] | None = Field(
None,
@@ -2045,7 +2136,9 @@ class RestartPolicy1(BaseModel):
"""
- Condition: Condition | None = Field(None, description="Condition for restart.")
+ Condition_: Condition | None = Field(
+ None, alias="Condition", description="Condition for restart."
+ )
Delay: int | None = Field(None, description="Delay between restart attempts.")
MaxAttempts: int | None = Field(
0,
@@ -2064,27 +2157,31 @@ class Spread(BaseModel):
class Preference(BaseModel):
- Spread: Spread | None = None
+ Spread_: Spread | None = Field(None, alias="Spread")
class Placement(BaseModel):
Constraints: list[str] | None = Field(
None,
description="An array of constraint expressions to limit the set of nodes where\na task can be scheduled. Constraint expressions can either use a\n_match_ (`==`) or _exclude_ (`!=`) rule. Multiple constraints find\nnodes that satisfy every expression (AND match). Constraints can\nmatch node or Docker Engine labels as follows:\n\nnode attribute | matches | example\n---------------------|--------------------------------|-----------------------------------------------\n`node.id` | Node ID | `node.id==2ivku8v2gvtg4`\n`node.hostname` | Node hostname | `node.hostname!=node-2`\n`node.role` | Node role (`manager`/`worker`) | `node.role==manager`\n`node.platform.os` | Node operating system | `node.platform.os==windows`\n`node.platform.arch` | Node architecture | `node.platform.arch==x86_64`\n`node.labels` | User-defined node labels | `node.labels.security==high`\n`engine.labels` | Docker Engine's labels | `engine.labels.operatingsystem==ubuntu-14.04`\n\n`engine.labels` apply to Docker Engine labels like operating system,\ndrivers, etc. Swarm administrators add `node.labels` for operational\npurposes by using the [`node update endpoint`](#operation/NodeUpdate).\n",
- example=[
- "node.hostname!=node3.corp.example.com",
- "node.role!=manager",
- "node.labels.type==production",
- "node.platform.os==linux",
- "node.platform.arch==x86_64",
+ examples=[
+ [
+ "node.hostname!=node3.corp.example.com",
+ "node.role!=manager",
+ "node.labels.type==production",
+ "node.platform.os==linux",
+ "node.platform.arch==x86_64",
+ ]
],
)
Preferences: list[Preference] | None = Field(
None,
description="Preferences provide a way to make the scheduler aware of factors\nsuch as topology. They are provided in order from highest to\nlowest precedence.\n",
- example=[
- {"Spread": {"SpreadDescriptor": "node.labels.datacenter"}},
- {"Spread": {"SpreadDescriptor": "node.labels.rack"}},
+ examples=[
+ [
+ {"Spread": {"SpreadDescriptor": "node.labels.datacenter"}},
+ {"Spread": {"SpreadDescriptor": "node.labels.rack"}},
+ ]
],
)
MaxReplicas: int | None = Field(
@@ -2138,7 +2235,7 @@ class Status1(BaseModel):
State: TaskState | None = None
Message: str | None = None
Err: str | None = None
- ContainerStatus: ContainerStatus | None = None
+ ContainerStatus_: ContainerStatus | None = Field(None, alias="ContainerStatus")
class Replicated(BaseModel):
@@ -2166,10 +2263,11 @@ class Mode(BaseModel):
Scheduling mode for the service.
"""
- Replicated: Replicated | None = None
+ Replicated_: Replicated | None = Field(None, alias="Replicated")
Global: dict[str, Any] | None = None
- ReplicatedJob: ReplicatedJob | None = Field(
+ ReplicatedJob_: ReplicatedJob | None = Field(
None,
+ alias="ReplicatedJob",
description="The mode used for services with a finite number of tasks that run\nto a completed state.\n",
)
GlobalJob: dict[str, Any] | None = Field(
@@ -2214,8 +2312,9 @@ class UpdateConfig(BaseModel):
Delay: int | None = Field(
None, description="Amount of time between updates, in nanoseconds."
)
- FailureAction: FailureAction | None = Field(
+ FailureAction_: FailureAction | None = Field(
None,
+ alias="FailureAction",
description="Action to take if an updated task fails to run, or stops running\nduring the update.\n",
)
Monitor: int | None = Field(
@@ -2226,8 +2325,9 @@ class UpdateConfig(BaseModel):
0,
description="The fraction of tasks that may fail during an update before the\nfailure action is invoked, specified as a floating point number\nbetween 0 and 1.\n",
)
- Order: Order | None = Field(
+ Order_: Order | None = Field(
None,
+ alias="Order",
description="The order of operations when rolling out an updated task. Either\nthe old task is shut down before the new task is started, or the\nnew task is started before the old task is shut down.\n",
)
@@ -2268,8 +2368,9 @@ class RollbackConfig(BaseModel):
0,
description="The fraction of tasks that may fail during a rollback before the\nfailure action is invoked, specified as a floating point number\nbetween 0 and 1.\n",
)
- Order: Order | None = Field(
+ Order_: Order | None = Field(
None,
+ alias="Order",
description="The order of operations when rolling back a task. Either the old\ntask is shut down before the new task is started, or the new task\nis started before the old task is shut down.\n",
)
@@ -2297,10 +2398,11 @@ class EndpointPortConfig(BaseModel):
Protocol: Type | None = None
TargetPort: int | None = Field(None, description="The port inside the container.")
PublishedPort: int | None = Field(None, description="The port on the swarm hosts.")
- PublishMode: PublishMode | None = Field(
+ PublishMode_: PublishMode | None = Field(
PublishMode.ingress,
+ alias="PublishMode",
description='The mode in which port is published.\n\n<p><br /></p>\n\n- "ingress" makes the target port accessible on every node,\n  regardless of whether there is a task for the service running on\n  that node or not.\n- "host" bypasses the routing mesh and publish the port directly on\n  the swarm node where that service is running.\n',
- example="ingress",
+ examples=["ingress"],
)
@@ -2351,7 +2453,7 @@ class UpdateStatus(BaseModel):
The status of a service update.
"""
- State: State | None = None
+ State_: State | None = Field(None, alias="State")
StartedAt: str | None = None
CompletedAt: str | None = None
Message: str | None = None
@@ -2367,12 +2469,12 @@ class ServiceStatus(BaseModel):
RunningTasks: int | None = Field(
None,
description="The number of tasks for the service currently in the Running state.\n",
- example=7,
+ examples=[7],
)
DesiredTasks: int | None = Field(
None,
description="The number of tasks for the service desired to be running.\nFor replicated services, this is the replica count from the\nservice spec. For global services, this is computed by taking\ncount of all tasks for the service with a Desired State other\nthan Shutdown.\n",
- example=10,
+ examples=[10],
)
CompletedTasks: int | None = Field(
None,
@@ -2425,10 +2527,12 @@ class Driver(BaseModel):
options: dict[str, str] | None = Field(
None,
description="Key/value map of driver-specific options.",
- example={
- "OptionA": "value for driver-specific option A",
- "OptionB": "value for driver-specific option B",
- },
+ examples=[
+ {
+ "OptionA": "value for driver-specific option A",
+ "OptionB": "value for driver-specific option B",
+ }
+ ],
)
@@ -2437,15 +2541,17 @@ class SecretSpec(BaseModel):
labels: dict[str, str] | None = Field(
None,
description="User-defined key/value metadata.",
- example={
- "com.example.some-label": "some-value",
- "com.example.some-other-label": "some-other-value",
- },
+ examples=[
+ {
+ "com.example.some-label": "some-value",
+ "com.example.some-other-label": "some-other-value",
+ }
+ ],
)
data: str | None = Field(
None,
description="Base64-url-safe-encoded ([RFC 4648](https://tools.ietf.org/html/rfc4648#section-5))\ndata to store as secret.\n\nThis field is only used to _create_ a secret, and is not returned by\nother endpoints.\n",
- example="",
+ examples=[""],
)
driver: Driver | None = Field(
None,
@@ -2458,10 +2564,10 @@ class SecretSpec(BaseModel):
class Secret1(BaseModel):
- ID: str | None = Field(None, example="blt1owaxmitz71s9v5zh81zun")
+ ID: str | None = Field(None, examples=["blt1owaxmitz71s9v5zh81zun"])
Version: ObjectVersion | None = None
- CreatedAt: str | None = Field(None, example="2017-07-20T13:55:28.678958722Z")
- UpdatedAt: str | None = Field(None, example="2017-07-20T13:55:28.678958722Z")
+ CreatedAt: str | None = Field(None, examples=["2017-07-20T13:55:28.678958722Z"])
+ UpdatedAt: str | None = Field(None, examples=["2017-07-20T13:55:28.678958722Z"])
Spec: SecretSpec | None = None
@@ -2517,9 +2623,9 @@ class Platform1(BaseModel):
class Component(BaseModel):
- Name: str = Field(..., description="Name of the component\n", example="Engine")
+ Name: str = Field(..., description="Name of the component\n", examples=["Engine"])
Version: str = Field(
- ..., description="Version of the component\n", example="19.03.12"
+ ..., description="Version of the component\n", examples=["19.03.12"]
)
Details: dict[str, Any] | None = Field(
None,
@@ -2538,52 +2644,52 @@ class SystemVersion(BaseModel):
None, description="Information about system components\n"
)
Version: str | None = Field(
- None, description="The version of the daemon", example="19.03.12"
+ None, description="The version of the daemon", examples=["19.03.12"]
)
ApiVersion: str | None = Field(
None,
description="The default (and highest) API version that is supported by the daemon\n",
- example="1.40",
+ examples=["1.40"],
)
MinAPIVersion: str | None = Field(
None,
description="The minimum API version that is supported by the daemon\n",
- example="1.12",
+ examples=["1.12"],
)
GitCommit: str | None = Field(
None,
description="The Git commit of the source code that was used to build the daemon\n",
- example="48a66213fe",
+ examples=["48a66213fe"],
)
GoVersion: str | None = Field(
None,
description="The version Go used to compile the daemon, and the version of the Go\nruntime in use.\n",
- example="go1.13.14",
+ examples=["go1.13.14"],
)
Os: str | None = Field(
None,
description='The operating system that the daemon is running on ("linux" or "windows")\n',
- example="linux",
+ examples=["linux"],
)
Arch: str | None = Field(
None,
description="The architecture that the daemon is running on\n",
- example="amd64",
+ examples=["amd64"],
)
KernelVersion: str | None = Field(
None,
description="The kernel version (`uname -r`) that the daemon is running on.\n\nThis field is omitted when empty.\n",
- example="4.19.76-linuxkit",
+ examples=["4.19.76-linuxkit"],
)
Experimental: bool | None = Field(
None,
description="Indicates if the daemon is started with experimental features enabled.\n\nThis field is omitted when empty / false.\n",
- example=True,
+ examples=[True],
)
BuildTime: str | None = Field(
None,
description="The date and time that the daemon was compiled.\n",
- example="2020-06-22T15:49:27.000000000+00:00",
+ examples=["2020-06-22T15:49:27.000000000+00:00"],
)
@@ -2627,9 +2733,11 @@ class Isolation2(str, Enum):
class DefaultAddressPool(BaseModel):
Base: str | None = Field(
- None, description="The network address in CIDR format", example="10.10.0.0/16"
+ None,
+ description="The network address in CIDR format",
+ examples=["10.10.0.0/16"],
)
- Size: int | None = Field(None, description="The network pool size", example="24")
+ Size: int | None = Field(None, description="The network pool size", examples=["24"])
class PluginsInfo(BaseModel):
@@ -2647,31 +2755,33 @@ class PluginsInfo(BaseModel):
Volume: list[str] | None = Field(
None,
description="Names of available volume-drivers, and network-driver plugins.",
- example=["local"],
+ examples=[["local"]],
)
Network: list[str] | None = Field(
None,
description="Names of available network-drivers, and network-driver plugins.",
- example=["bridge", "host", "ipvlan", "macvlan", "null", "overlay"],
+ examples=[["bridge", "host", "ipvlan", "macvlan", "null", "overlay"]],
)
Authorization: list[str] | None = Field(
None,
description="Names of available authorization plugins.",
- example=["img-authz-plugin", "hbm"],
+ examples=[["img-authz-plugin", "hbm"]],
)
Log: list[str] | None = Field(
None,
description="Names of available logging-drivers, and logging-driver plugins.",
- example=[
- "awslogs",
- "fluentd",
- "gcplogs",
- "gelf",
- "journald",
- "json-file",
- "logentries",
- "splunk",
- "syslog",
+ examples=[
+ [
+ "awslogs",
+ "fluentd",
+ "gcplogs",
+ "gelf",
+ "journald",
+ "json-file",
+ "logentries",
+ "splunk",
+ "syslog",
+ ]
],
)
@@ -2684,26 +2794,28 @@ class IndexInfo(BaseModel):
Name: str | None = Field(
None,
description='Name of the registry, such as "docker.io".\n',
- example="docker.io",
+ examples=["docker.io"],
)
Mirrors: list[str] | None = Field(
None,
description="List of mirrors, expressed as URIs.\n",
- example=[
- "https://hub-mirror.corp.example.com:5000/",
- "https://registry-2.docker.io/",
- "https://registry-3.docker.io/",
+ examples=[
+ [
+ "https://hub-mirror.corp.example.com:5000/",
+ "https://registry-2.docker.io/",
+ "https://registry-3.docker.io/",
+ ]
],
)
Secure: bool | None = Field(
None,
description="Indicates if the registry is part of the list of insecure\nregistries.\n\nIf `false`, the registry is insecure. Insecure registries accept\nun-encrypted (HTTP) and/or untrusted (HTTPS with certificates from\nunknown CAs) communication.\n\n> **Warning**: Insecure registries can be useful when running a local\n> registry. However, because its use creates security vulnerabilities\n> it should ONLY be enabled for testing purposes. For increased\n> security, users should add their CA to their system's list of\n> trusted CAs instead of enabling this option.\n",
- example=True,
+ examples=[True],
)
Official: bool | None = Field(
None,
description="Indicates whether this is an official registry (i.e., Docker Hub / docker.io)\n",
- example=True,
+ examples=[True],
)
@@ -2721,12 +2833,12 @@ class Runtime(BaseModel):
path: str | None = Field(
None,
description="Name and, optional, path, of the OCI executable binary.\n\nIf the path is omitted, the daemon searches the host's `$PATH` for the\nbinary and uses the first result.\n",
- example="/usr/local/bin/my-oci-runtime",
+ examples=["/usr/local/bin/my-oci-runtime"],
)
runtimeArgs: list[str] | None = Field(
None,
description="List of command-line arguments to pass to the runtime when invoked.\n",
- example=["--debug", "--systemd-cgroup=false"],
+ examples=[["--debug", "--systemd-cgroup=false"]],
)
@@ -2741,12 +2853,12 @@ class Commit(BaseModel):
ID: str | None = Field(
None,
description="Actual commit ID of external tool.",
- example="cfb82a876ecc11b5ca0977d1733adbe58599088a",
+ examples=["cfb82a876ecc11b5ca0977d1733adbe58599088a"],
)
Expected: str | None = Field(
None,
description="Commit ID of external tool expected by dockerd as set at build time.\n",
- example="2d41c047c83e09a6d61d464906feb2a2f3c52aa4",
+ examples=["2d41c047c83e09a6d61d464906feb2a2f3c52aa4"],
)
@@ -2805,16 +2917,18 @@ class EventActor(BaseModel):
ID: str | None = Field(
None,
description="The ID of the object emitting the event",
- example="ede54ee1afda366ab42f824e8a5ffd195155d853ceaec74a927f249ea270c743",
+ examples=["ede54ee1afda366ab42f824e8a5ffd195155d853ceaec74a927f249ea270c743"],
)
Attributes: dict[str, str] | None = Field(
None,
description="Various key/value attributes of the object, depending on its type.\n",
- example={
- "com.example.some-label": "some-label-value",
- "image": "alpine:latest",
- "name": "my-container",
- },
+ examples=[
+ {
+ "com.example.some-label": "some-label-value",
+ "image": "alpine:latest",
+ "name": "my-container",
+ }
+ ],
)
@@ -2854,19 +2968,25 @@ class SystemEventsResponse(BaseModel):
"""
Type: Type5 | None = Field(
- None, description="The type of object emitting the event", example="container"
+ None,
+ description="The type of object emitting the event",
+ examples=["container"],
+ )
+ Action: str | None = Field(
+ None, description="The type of event", examples=["create"]
)
- Action: str | None = Field(None, description="The type of event", example="create")
Actor: EventActor | None = None
scope: Scope1 | None = Field(
None,
description="Scope of the event. Engine events are `local` scope. Cluster (Swarm)\nevents are `swarm` scope.\n",
)
- time: int | None = Field(None, description="Timestamp of event", example=1629574695)
+ time: int | None = Field(
+ None, description="Timestamp of event", examples=[1629574695]
+ )
timeNano: int | None = Field(
None,
description="Timestamp of event, with nanosecond accuracy",
- example=1629574695515050031,
+ examples=[1629574695515050031],
)
@@ -2880,15 +3000,17 @@ class OCIDescriptor(BaseModel):
mediaType: str | None = Field(
None,
description="The media type of the object this schema refers to.\n",
- example="application/vnd.docker.distribution.manifest.v2+json",
+ examples=["application/vnd.docker.distribution.manifest.v2+json"],
)
digest: str | None = Field(
None,
description="The digest of the targeted content.\n",
- example="sha256:c0537ff6a5218ef531ece93d4984efc99bbf3f7497c0a7726c88e2bb7584dc96",
+ examples=[
+ "sha256:c0537ff6a5218ef531ece93d4984efc99bbf3f7497c0a7726c88e2bb7584dc96"
+ ],
)
size: int | None = Field(
- None, description="The size in bytes of the blob.\n", example=3987495
+ None, description="The size in bytes of the blob.\n", examples=[3987495]
)
@@ -2902,29 +3024,29 @@ class OCIPlatform(BaseModel):
architecture: str | None = Field(
None,
description="The CPU architecture, for example `amd64` or `ppc64`.\n",
- example="arm",
+ examples=["arm"],
)
os: str | None = Field(
None,
description="The operating system, for example `linux` or `windows`.\n",
- example="windows",
+ examples=["windows"],
)
os_version: str | None = Field(
None,
alias="os.version",
description="Optional field specifying the operating system version, for example on\nWindows `10.0.19041.1165`.\n",
- example="10.0.19041.1165",
+ examples=["10.0.19041.1165"],
)
os_features: list[str] | None = Field(
None,
alias="os.features",
description="Optional field specifying an array of strings, each listing a required\nOS feature (for example on Windows `win32k`).\n",
- example=["win32k"],
+ examples=[["win32k"]],
)
variant: str | None = Field(
None,
description="Optional field specifying a variant of the CPU, for example `v7` to\nspecify ARMv7 when architecture is `arm`.\n",
- example="v7",
+ examples=["v7"],
)
@@ -2948,9 +3070,9 @@ class ResourceObject(BaseModel):
"""
- NanoCPUs: int | None = Field(None, example=4000000000)
- MemoryBytes: int | None = Field(None, example=8272408576)
- GenericResources: GenericResources | None = None
+ NanoCPUs: int | None = Field(None, examples=[4000000000])
+ MemoryBytes: int | None = Field(None, examples=[8272408576])
+ GenericResources_: GenericResources | None = Field(None, alias="GenericResources")
class Health(BaseModel):
@@ -2959,15 +3081,16 @@ class Health(BaseModel):
"""
- Status: Status | None = Field(
+ Status_: Status | None = Field(
None,
+ alias="Status",
description='Status is one of `none`, `starting`, `healthy` or `unhealthy`\n\n- "none" Indicates there is no healthcheck\n- "starting" Starting indicates that the container is not yet ready\n- "healthy" Healthy indicates that the container is running correctly\n- "unhealthy" Unhealthy indicates that the container has a problem\n',
- example="healthy",
+ examples=["healthy"],
)
FailingStreak: int | None = Field(
None,
description="FailingStreak is the number of consecutive failures",
- example=0,
+ examples=[0],
)
Log: list[HealthcheckResult] | None = Field(
None, description="Log contains the last few results (oldest first)\n"
@@ -2986,15 +3109,17 @@ class HostConfig(Resources):
ContainerIDFile: str | None = Field(
None, description="Path to a file where the container ID is written"
)
- LogConfig: LogConfig | None = Field(
- None, description="The logging configuration for this container"
+ LogConfig_: LogConfig | None = Field(
+ None,
+ alias="LogConfig",
+ description="The logging configuration for this container",
)
NetworkMode: str | None = Field(
None,
description="Network mode to use for this container. Supported standard values\nare: `bridge`, `host`, `none`, and `container:`. Any\nother value is taken as a custom network's name to which this\ncontainer should connect to.\n",
)
PortBindings: PortMap | None = None
- RestartPolicy: RestartPolicy | None = None
+ RestartPolicy_: RestartPolicy | None = Field(None, alias="RestartPolicy")
AutoRemove: bool | None = Field(
None,
description="Automatically remove the container when the container's process\nexits. This has no effect if `RestartPolicy` is set.\n",
@@ -3017,8 +3142,9 @@ class HostConfig(Resources):
None,
description="A list of kernel capabilities to drop from the container. Conflicts\nwith option 'Capabilities'.\n",
)
- CgroupnsMode: CgroupnsMode | None = Field(
+ CgroupnsMode_: CgroupnsMode | None = Field(
None,
+ alias="CgroupnsMode",
description='cgroup namespace mode for the container. Possible values are:\n\n- `"private"`: the container runs in its own private cgroup namespace\n- `"host"`: use the host system\'s cgroup namespace\n\nIf not specified, the daemon default is used, which can either be `"private"`\nor `"host"`, depending on daemon version, kernel support and configuration.\n',
)
Dns: list[str] | None = Field(
@@ -3048,7 +3174,7 @@ class HostConfig(Resources):
OomScoreAdj: int | None = Field(
None,
description="An integer value containing the score given to the container in\norder to tune OOM killer preferences.\n",
- example=500,
+ examples=[500],
)
PidMode: str | None = Field(
None,
@@ -3096,11 +3222,13 @@ class HostConfig(Resources):
ConsoleSize: list[ConsoleSizeItem] | None = Field(
None,
description="Initial console size, as an `[height, width]` array. (Windows only)\n",
- max_items=2,
- min_items=2,
+ max_length=2,
+ min_length=2,
)
- Isolation: Isolation | None = Field(
- None, description="Isolation technology of the container. (Windows only)\n"
+ Isolation_: Isolation | None = Field(
+ None,
+ alias="Isolation",
+ description="Isolation technology of the container. (Windows only)\n",
)
MaskedPaths: list[str] | None = Field(
None,
@@ -3156,48 +3284,50 @@ class EndpointSettings(BaseModel):
"""
IPAMConfig: EndpointIPAMConfig | None = None
- Links: list[str] | None = Field(None, example=["container_1", "container_2"])
- Aliases: list[str] | None = Field(None, example=["server_x", "server_y"])
+ Links: list[str] | None = Field(None, examples=[["container_1", "container_2"]])
+ Aliases: list[str] | None = Field(None, examples=[["server_x", "server_y"]])
NetworkID: str | None = Field(
None,
description="Unique ID of the network.\n",
- example="08754567f1f40222263eab4102e1c733ae697e8e354aa9cd6e18d7402835292a",
+ examples=["08754567f1f40222263eab4102e1c733ae697e8e354aa9cd6e18d7402835292a"],
)
EndpointID: str | None = Field(
None,
description="Unique ID for the service endpoint in a Sandbox.\n",
- example="b88f5b905aabf2893f3cbc4ee42d1ea7980bbc0a92e2c8922b1e1795298afb0b",
+ examples=["b88f5b905aabf2893f3cbc4ee42d1ea7980bbc0a92e2c8922b1e1795298afb0b"],
)
Gateway: str | None = Field(
- None, description="Gateway address for this network.\n", example="172.17.0.1"
+ None, description="Gateway address for this network.\n", examples=["172.17.0.1"]
)
IPAddress: str | None = Field(
- None, description="IPv4 address.\n", example="172.17.0.4"
+ None, description="IPv4 address.\n", examples=["172.17.0.4"]
)
IPPrefixLen: int | None = Field(
- None, description="Mask length of the IPv4 address.\n", example=16
+ None, description="Mask length of the IPv4 address.\n", examples=[16]
)
IPv6Gateway: str | None = Field(
- None, description="IPv6 gateway address.\n", example="2001:db8:2::100"
+ None, description="IPv6 gateway address.\n", examples=["2001:db8:2::100"]
)
GlobalIPv6Address: str | None = Field(
- None, description="Global IPv6 address.\n", example="2001:db8::5689"
+ None, description="Global IPv6 address.\n", examples=["2001:db8::5689"]
)
GlobalIPv6PrefixLen: int | None = Field(
- None, description="Mask length of the global IPv6 address.\n", example=64
+ None, description="Mask length of the global IPv6 address.\n", examples=[64]
)
MacAddress: str | None = Field(
None,
description="MAC address for the endpoint on this network.\n",
- example="02:42:ac:11:00:04",
+ examples=["02:42:ac:11:00:04"],
)
DriverOpts: dict[str, str] | None = Field(
None,
description="DriverOpts is a mapping of driver options and values. These options\nare passed directly to the driver and are driver specific.\n",
- example={
- "com.example.some-label": "some-value",
- "com.example.some-other-label": "some-other-value",
- },
+ examples=[
+ {
+ "com.example.some-label": "some-value",
+ "com.example.some-other-label": "some-other-value",
+ }
+ ],
)
@@ -3208,11 +3338,11 @@ class NodeDescription(BaseModel):
"""
- Hostname: str | None = Field(None, example="bf3067039e47")
- Platform: Platform | None = None
+ Hostname: str | None = Field(None, examples=["bf3067039e47"])
+ Platform_: Platform | None = Field(None, alias="Platform")
Resources: ResourceObject | None = None
Engine: EngineDescription | None = None
- TLSInfo: TLSInfo | None = None
+ TLSInfo_: TLSInfo | None = Field(None, alias="TLSInfo")
class NodeStatus(BaseModel):
@@ -3224,9 +3354,9 @@ class NodeStatus(BaseModel):
"""
State: NodeState | None = None
- Message: str | None = Field(None, example="")
+ Message: str | None = Field(None, examples=[""])
Addr: str | None = Field(
- None, description="IP address of the node.", example="172.17.0.2"
+ None, description="IP address of the node.", examples=["172.17.0.2"]
)
@@ -3239,12 +3369,12 @@ class ManagerStatus(BaseModel):
"""
- Leader: bool | None = Field(False, example=True)
- Reachability: Reachability | None = None
+ Leader: bool | None = Field(False, examples=[True])
+ Reachability_: Reachability | None = Field(None, alias="Reachability")
Addr: str | None = Field(
None,
description="The IP address and port at which the manager is reachable.\n",
- example="10.0.0.46:2377",
+ examples=["10.0.0.46:2377"],
)
@@ -3266,16 +3396,19 @@ class TaskSpec(BaseModel):
User modifiable task configuration.
"""
- PluginSpec: PluginSpec | None = Field(
+ PluginSpec_: PluginSpec | None = Field(
None,
+ alias="PluginSpec",
description="Plugin spec for the service. *(Experimental release only.)*\n\n
\n\n> **Note**: ContainerSpec, NetworkAttachmentSpec, and PluginSpec are\n> mutually exclusive. PluginSpec is only used when the Runtime field\n> is set to `plugin`. NetworkAttachmentSpec is used when the Runtime\n> field is set to `attachment`.\n",
)
- ContainerSpec: ContainerSpec | None = Field(
+ ContainerSpec_: ContainerSpec | None = Field(
None,
+ alias="ContainerSpec",
description="Container spec for the service.\n\n
\n\n> **Note**: ContainerSpec, NetworkAttachmentSpec, and PluginSpec are\n> mutually exclusive. PluginSpec is only used when the Runtime field\n> is set to `plugin`. NetworkAttachmentSpec is used when the Runtime\n> field is set to `attachment`.\n",
)
- NetworkAttachmentSpec: NetworkAttachmentSpec | None = Field(
+ NetworkAttachmentSpec_: NetworkAttachmentSpec | None = Field(
None,
+ alias="NetworkAttachmentSpec",
description="Read-only spec type for non-swarm containers attached to swarm overlay\nnetworks.\n\n
\n\n> **Note**: ContainerSpec, NetworkAttachmentSpec, and PluginSpec are\n> mutually exclusive. PluginSpec is only used when the Runtime field\n> is set to `plugin`. NetworkAttachmentSpec is used when the Runtime\n> field is set to `attachment`.\n",
)
Resources: Resources1 | None = Field(
@@ -3286,7 +3419,7 @@ class TaskSpec(BaseModel):
None,
description="Specification for the restart policy which applies to containers\ncreated as part of this service.\n",
)
- Placement: Placement | None = None
+ Placement_: Placement | None = Field(None, alias="Placement")
ForceUpdate: int | None = Field(
None,
description="A counter that triggers an update even if no relevant parameters have\nbeen changed.\n",
@@ -3340,17 +3473,23 @@ class ServiceSpec(BaseModel):
None, description="User-defined key/value metadata."
)
TaskTemplate: TaskSpec | None = None
- Mode: Mode | None = Field(None, description="Scheduling mode for the service.")
- UpdateConfig: UpdateConfig | None = Field(
- None, description="Specification for the update strategy of the service."
+ Mode_: Mode | None = Field(
+ None, alias="Mode", description="Scheduling mode for the service."
)
- RollbackConfig: RollbackConfig | None = Field(
- None, description="Specification for the rollback strategy of the service."
+ UpdateConfig_: UpdateConfig | None = Field(
+ None,
+ alias="UpdateConfig",
+ description="Specification for the update strategy of the service.",
+ )
+ RollbackConfig_: RollbackConfig | None = Field(
+ None,
+ alias="RollbackConfig",
+ description="Specification for the rollback strategy of the service.",
)
Networks: list[NetworkAttachmentConfig] | None = Field(
None, description="Specifies which networks the service should attach to."
)
- EndpointSpec: EndpointSpec | None = None
+ EndpointSpec_: EndpointSpec | None = Field(None, alias="EndpointSpec")
class Service(BaseModel):
@@ -3359,16 +3498,18 @@ class Service(BaseModel):
CreatedAt: str | None = None
UpdatedAt: str | None = None
Spec: ServiceSpec | None = None
- Endpoint: Endpoint | None = None
- UpdateStatus: UpdateStatus | None = Field(
- None, description="The status of a service update."
+ Endpoint_: Endpoint | None = Field(None, alias="Endpoint")
+ UpdateStatus_: UpdateStatus | None = Field(
+ None, alias="UpdateStatus", description="The status of a service update."
)
- ServiceStatus: ServiceStatus | None = Field(
+ ServiceStatus_: ServiceStatus | None = Field(
None,
+ alias="ServiceStatus",
description="The status of the service's tasks. Provided only when requested as\npart of a ServiceList operation.\n",
)
- JobStatus: JobStatus | None = Field(
+ JobStatus_: JobStatus | None = Field(
None,
+ alias="JobStatus",
description="The status of the service when it is in one of ReplicatedJob or\nGlobalJob modes. Absent on Replicated and Global mode services. The\nJobIteration is an ObjectVersion, but unlike the Service's version,\ndoes not need to be sent with an update request.\n",
)
@@ -3433,43 +3574,43 @@ class ContainerState(BaseModel):
Status: Status2 | None = Field(
None,
description='String representation of the container state. Can be one of "created",\n"running", "paused", "restarting", "removing", "exited", or "dead".\n',
- example="running",
+ examples=["running"],
)
Running: bool | None = Field(
None,
description='Whether this container is running.\n\nNote that a running container can be _paused_. The `Running` and `Paused`\nbooleans are not mutually exclusive:\n\nWhen pausing a container (on Linux), the freezer cgroup is used to suspend\nall processes in the container. Freezing the process requires the process to\nbe running. As a result, paused containers are both `Running` _and_ `Paused`.\n\nUse the `Status` field instead to determine if a container\'s state is "running".\n',
- example=True,
+ examples=[True],
)
Paused: bool | None = Field(
- None, description="Whether this container is paused.", example=False
+ None, description="Whether this container is paused.", examples=[False]
)
Restarting: bool | None = Field(
- None, description="Whether this container is restarting.", example=False
+ None, description="Whether this container is restarting.", examples=[False]
)
OOMKilled: bool | None = Field(
None,
description="Whether this container has been killed because it ran out of memory.\n",
- example=False,
+ examples=[False],
)
- Dead: bool | None = Field(None, example=False)
+ Dead: bool | None = Field(None, examples=[False])
Pid: int | None = Field(
- None, description="The process ID of this container", example=1234
+ None, description="The process ID of this container", examples=[1234]
)
ExitCode: int | None = Field(
- None, description="The last exit code of this container", example=0
+ None, description="The last exit code of this container", examples=[0]
)
Error: str | None = None
StartedAt: str | None = Field(
None,
description="The time when this container was last started.",
- example="2020-01-06T09:06:59.461876391Z",
+ examples=["2020-01-06T09:06:59.461876391Z"],
)
FinishedAt: str | None = Field(
None,
description="The time when this container last exited.",
- example="2020-01-06T09:07:59.461876391Z",
+ examples=["2020-01-06T09:07:59.461876391Z"],
)
- Health: Health | None = None
+ Health_: Health | None = Field(None, alias="Health")
class ContainerWaitResponse(BaseModel):
@@ -3490,56 +3631,62 @@ class RegistryServiceConfig(BaseModel):
AllowNondistributableArtifactsCIDRs: list[str] | None = Field(
None,
description="List of IP ranges to which nondistributable artifacts can be pushed,\nusing the CIDR syntax [RFC 4632](https://tools.ietf.org/html/4632).\n\nSome images (for example, Windows base images) contain artifacts\nwhose distribution is restricted by license. When these images are\npushed to a registry, restricted artifacts are not included.\n\nThis configuration override this behavior, and enables the daemon to\npush nondistributable artifacts to all registries whose resolved IP\naddress is within the subnet described by the CIDR syntax.\n\nThis option is useful when pushing images containing\nnondistributable artifacts to a registry on an air-gapped network so\nhosts on that network can pull the images without connecting to\nanother server.\n\n> **Warning**: Nondistributable artifacts typically have restrictions\n> on how and where they can be distributed and shared. Only use this\n> feature to push artifacts to private registries and ensure that you\n> are in compliance with any terms that cover redistributing\n> nondistributable artifacts.\n",
- example=["::1/128", "127.0.0.0/8"],
+ examples=[["::1/128", "127.0.0.0/8"]],
)
AllowNondistributableArtifactsHostnames: list[str] | None = Field(
None,
description="List of registry hostnames to which nondistributable artifacts can be\npushed, using the format `[:]` or `[:]`.\n\nSome images (for example, Windows base images) contain artifacts\nwhose distribution is restricted by license. When these images are\npushed to a registry, restricted artifacts are not included.\n\nThis configuration override this behavior for the specified\nregistries.\n\nThis option is useful when pushing images containing\nnondistributable artifacts to a registry on an air-gapped network so\nhosts on that network can pull the images without connecting to\nanother server.\n\n> **Warning**: Nondistributable artifacts typically have restrictions\n> on how and where they can be distributed and shared. Only use this\n> feature to push artifacts to private registries and ensure that you\n> are in compliance with any terms that cover redistributing\n> nondistributable artifacts.\n",
- example=[
- "registry.internal.corp.example.com:3000",
- "[2001:db8:a0b:12f0::1]:443",
+ examples=[
+ [
+ "registry.internal.corp.example.com:3000",
+ "[2001:db8:a0b:12f0::1]:443",
+ ]
],
)
InsecureRegistryCIDRs: list[str] | None = Field(
None,
description="List of IP ranges of insecure registries, using the CIDR syntax\n([RFC 4632](https://tools.ietf.org/html/4632)). Insecure registries\naccept un-encrypted (HTTP) and/or untrusted (HTTPS with certificates\nfrom unknown CAs) communication.\n\nBy default, local registries (`127.0.0.0/8`) are configured as\ninsecure. All other registries are secure. Communicating with an\ninsecure registry is not possible if the daemon assumes that registry\nis secure.\n\nThis configuration override this behavior, insecure communication with\nregistries whose resolved IP address is within the subnet described by\nthe CIDR syntax.\n\nRegistries can also be marked insecure by hostname. Those registries\nare listed under `IndexConfigs` and have their `Secure` field set to\n`false`.\n\n> **Warning**: Using this option can be useful when running a local\n> registry, but introduces security vulnerabilities. This option\n> should therefore ONLY be used for testing purposes. For increased\n> security, users should add their CA to their system's list of trusted\n> CAs instead of enabling this option.\n",
- example=["::1/128", "127.0.0.0/8"],
+ examples=[["::1/128", "127.0.0.0/8"]],
)
IndexConfigs: dict[str, IndexInfo] | None = Field(
None,
- example={
- "127.0.0.1:5000": {
- "Name": "127.0.0.1:5000",
- "Mirrors": [],
- "Secure": False,
- "Official": False,
- },
- "[2001:db8:a0b:12f0::1]:80": {
- "Name": "[2001:db8:a0b:12f0::1]:80",
- "Mirrors": [],
- "Secure": False,
- "Official": False,
- },
- "docker.io": {
- "Name": "docker.io",
- "Mirrors": ["https://hub-mirror.corp.example.com:5000/"],
- "Secure": True,
- "Official": True,
- },
- "registry.internal.corp.example.com:3000": {
- "Name": "registry.internal.corp.example.com:3000",
- "Mirrors": [],
- "Secure": False,
- "Official": False,
- },
- },
+ examples=[
+ {
+ "127.0.0.1:5000": {
+ "Name": "127.0.0.1:5000",
+ "Mirrors": [],
+ "Secure": False,
+ "Official": False,
+ },
+ "[2001:db8:a0b:12f0::1]:80": {
+ "Name": "[2001:db8:a0b:12f0::1]:80",
+ "Mirrors": [],
+ "Secure": False,
+ "Official": False,
+ },
+ "docker.io": {
+ "Name": "docker.io",
+ "Mirrors": ["https://hub-mirror.corp.example.com:5000/"],
+ "Secure": True,
+ "Official": True,
+ },
+ "registry.internal.corp.example.com:3000": {
+ "Name": "registry.internal.corp.example.com:3000",
+ "Mirrors": [],
+ "Secure": False,
+ "Official": False,
+ },
+ }
+ ],
)
Mirrors: list[str] | None = Field(
None,
description="List of registry URLs that act as a mirror for the official\n(`docker.io`) registry.\n",
- example=[
- "https://hub-mirror.corp.example.com:5000/",
- "https://[2001:db8:a0b:12f0::1]/",
+ examples=[
+ [
+ "https://hub-mirror.corp.example.com:5000/",
+ "https://[2001:db8:a0b:12f0::1]/",
+ ]
],
)
@@ -3553,30 +3700,32 @@ class SwarmInfo(BaseModel):
NodeID: str | None = Field(
"",
description="Unique identifier of for this node in the swarm.",
- example="k67qz4598weg5unwwffg6z1m1",
+ examples=["k67qz4598weg5unwwffg6z1m1"],
)
NodeAddr: str | None = Field(
"",
description="IP address at which this node can be reached by other nodes in the\nswarm.\n",
- example="10.0.0.46",
+ examples=["10.0.0.46"],
)
- LocalNodeState: LocalNodeState | None = None
- ControlAvailable: bool | None = Field(False, example=True)
+ LocalNodeState_: LocalNodeState | None = Field(None, alias="LocalNodeState")
+ ControlAvailable: bool | None = Field(False, examples=[True])
Error: str | None = ""
RemoteManagers: list[PeerNode] | None = Field(
None,
description="List of ID's and addresses of other managers in the swarm.\n",
- example=[
- {"NodeID": "71izy0goik036k48jg985xnds", "Addr": "10.0.0.158:2377"},
- {"NodeID": "79y6h1o4gv8n120drcprv5nmc", "Addr": "10.0.0.159:2377"},
- {"NodeID": "k67qz4598weg5unwwffg6z1m1", "Addr": "10.0.0.46:2377"},
+ examples=[
+ [
+ {"NodeID": "71izy0goik036k48jg985xnds", "Addr": "10.0.0.158:2377"},
+ {"NodeID": "79y6h1o4gv8n120drcprv5nmc", "Addr": "10.0.0.159:2377"},
+ {"NodeID": "k67qz4598weg5unwwffg6z1m1", "Addr": "10.0.0.46:2377"},
+ ]
],
)
Nodes: int | None = Field(
- None, description="Total number of nodes in the swarm.", example=4
+ None, description="Total number of nodes in the swarm.", examples=[4]
)
Managers: int | None = Field(
- None, description="Total number of managers in the swarm.", example=3
+ None, description="Total number of managers in the swarm.", examples=[3]
)
Cluster: ClusterInfo | None = None
@@ -3604,73 +3753,73 @@ class NetworkSettings(BaseModel):
Bridge: str | None = Field(
None,
description="Name of the network'a bridge (for example, `docker0`).",
- example="docker0",
+ examples=["docker0"],
)
SandboxID: str | None = Field(
None,
description="SandboxID uniquely represents a container's network stack.",
- example="9d12daf2c33f5959c8bf90aa513e4f65b561738661003029ec84830cd503a0c3",
+ examples=["9d12daf2c33f5959c8bf90aa513e4f65b561738661003029ec84830cd503a0c3"],
)
HairpinMode: bool | None = Field(
None,
description="Indicates if hairpin NAT should be enabled on the virtual interface.\n",
- example=False,
+ examples=[False],
)
LinkLocalIPv6Address: str | None = Field(
None,
description="IPv6 unicast address using the link-local prefix.",
- example="fe80::42:acff:fe11:1",
+ examples=["fe80::42:acff:fe11:1"],
)
LinkLocalIPv6PrefixLen: int | None = Field(
- None, description="Prefix length of the IPv6 unicast address.", example="64"
+ None, description="Prefix length of the IPv6 unicast address.", examples=["64"]
)
Ports: PortMap | None = None
SandboxKey: str | None = Field(
None,
description="SandboxKey identifies the sandbox",
- example="/var/run/docker/netns/8ab54b426c38",
+ examples=["/var/run/docker/netns/8ab54b426c38"],
)
SecondaryIPAddresses: list[Address] | None = Field(None, description="")
SecondaryIPv6Addresses: list[Address] | None = Field(None, description="")
EndpointID: str | None = Field(
None,
description='EndpointID uniquely represents a service endpoint in a Sandbox.\n\n\n\n> **Deprecated**: This field is only propagated when attached to the\n> default "bridge" network. Use the information from the "bridge"\n> network inside the `Networks` map instead, which contains the same\n> information. This field was deprecated in Docker 1.9 and is scheduled\n> to be removed in Docker 17.12.0\n',
- example="b88f5b905aabf2893f3cbc4ee42d1ea7980bbc0a92e2c8922b1e1795298afb0b",
+ examples=["b88f5b905aabf2893f3cbc4ee42d1ea7980bbc0a92e2c8922b1e1795298afb0b"],
)
Gateway: str | None = Field(
None,
description='Gateway address for the default "bridge" network.\n\n\n\n> **Deprecated**: This field is only propagated when attached to the\n> default "bridge" network. Use the information from the "bridge"\n> network inside the `Networks` map instead, which contains the same\n> information. This field was deprecated in Docker 1.9 and is scheduled\n> to be removed in Docker 17.12.0\n',
- example="172.17.0.1",
+ examples=["172.17.0.1"],
)
GlobalIPv6Address: str | None = Field(
None,
description='Global IPv6 address for the default "bridge" network.\n\n\n\n> **Deprecated**: This field is only propagated when attached to the\n> default "bridge" network. Use the information from the "bridge"\n> network inside the `Networks` map instead, which contains the same\n> information. This field was deprecated in Docker 1.9 and is scheduled\n> to be removed in Docker 17.12.0\n',
- example="2001:db8::5689",
+ examples=["2001:db8::5689"],
)
GlobalIPv6PrefixLen: int | None = Field(
None,
description='Mask length of the global IPv6 address.\n\n\n\n> **Deprecated**: This field is only propagated when attached to the\n> default "bridge" network. Use the information from the "bridge"\n> network inside the `Networks` map instead, which contains the same\n> information. This field was deprecated in Docker 1.9 and is scheduled\n> to be removed in Docker 17.12.0\n',
- example=64,
+ examples=[64],
)
IPAddress: str | None = Field(
None,
description='IPv4 address for the default "bridge" network.\n\n\n\n> **Deprecated**: This field is only propagated when attached to the\n> default "bridge" network. Use the information from the "bridge"\n> network inside the `Networks` map instead, which contains the same\n> information. This field was deprecated in Docker 1.9 and is scheduled\n> to be removed in Docker 17.12.0\n',
- example="172.17.0.4",
+ examples=["172.17.0.4"],
)
IPPrefixLen: int | None = Field(
None,
description='Mask length of the IPv4 address.\n\n\n\n> **Deprecated**: This field is only propagated when attached to the\n> default "bridge" network. Use the information from the "bridge"\n> network inside the `Networks` map instead, which contains the same\n> information. This field was deprecated in Docker 1.9 and is scheduled\n> to be removed in Docker 17.12.0\n',
- example=16,
+ examples=[16],
)
IPv6Gateway: str | None = Field(
None,
description='IPv6 gateway address for this network.\n\n\n\n> **Deprecated**: This field is only propagated when attached to the\n> default "bridge" network. Use the information from the "bridge"\n> network inside the `Networks` map instead, which contains the same\n> information. This field was deprecated in Docker 1.9 and is scheduled\n> to be removed in Docker 17.12.0\n',
- example="2001:db8:2::100",
+ examples=["2001:db8:2::100"],
)
MacAddress: str | None = Field(
None,
description='MAC address for the container on the default "bridge" network.\n\n\n\n> **Deprecated**: This field is only propagated when attached to the\n> default "bridge" network. Use the information from the "bridge"\n> network inside the `Networks` map instead, which contains the same\n> information. This field was deprecated in Docker 1.9 and is scheduled\n> to be removed in Docker 17.12.0\n',
- example="02:42:ac:11:00:04",
+ examples=["02:42:ac:11:00:04"],
)
Networks: dict[str, EndpointSettings] | None = Field(
None,
@@ -3685,7 +3834,7 @@ class Network(BaseModel):
Scope: str | None = None
Driver: str | None = None
EnableIPv6: bool | None = None
- IPAM: IPAM | None = None
+ IPAM_: IPAM | None = Field(None, alias="IPAM")
Internal: bool | None = None
Attachable: bool | None = None
Ingress: bool | None = None
@@ -3695,267 +3844,279 @@ class Network(BaseModel):
class Node(BaseModel):
- ID: str | None = Field(None, example="24ifsmvkjbyhk")
+ ID: str | None = Field(None, examples=["24ifsmvkjbyhk"])
Version: ObjectVersion | None = None
CreatedAt: str | None = Field(
None,
description="Date and time at which the node was added to the swarm in\n[RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format with nano-seconds.\n",
- example="2016-08-18T10:44:24.496525531Z",
+ examples=["2016-08-18T10:44:24.496525531Z"],
)
UpdatedAt: str | None = Field(
None,
description="Date and time at which the node was last updated in\n[RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format with nano-seconds.\n",
- example="2017-08-09T07:09:37.632105588Z",
+ examples=["2017-08-09T07:09:37.632105588Z"],
)
Spec: NodeSpec | None = None
Description: NodeDescription | None = None
Status: NodeStatus | None = None
- ManagerStatus: ManagerStatus | None = None
+ ManagerStatus_: ManagerStatus | None = Field(None, alias="ManagerStatus")
class SystemInfo(BaseModel):
ID: str | None = Field(
None,
description="Unique identifier of the daemon.\n\n
\n\n> **Note**: The format of the ID itself is not part of the API, and\n> should not be considered stable.\n",
- example="7TRN:IPZB:QYBB:VPBQ:UMPP:KARE:6ZNR:XE6T:7EWV:PKF4:ZOJD:TPYS",
+ examples=["7TRN:IPZB:QYBB:VPBQ:UMPP:KARE:6ZNR:XE6T:7EWV:PKF4:ZOJD:TPYS"],
)
Containers: int | None = Field(
- None, description="Total number of containers on the host.", example=14
+ None, description="Total number of containers on the host.", examples=[14]
)
ContainersRunning: int | None = Field(
- None, description='Number of containers with status `"running"`.\n', example=3
+ None,
+ description='Number of containers with status `"running"`.\n',
+ examples=[3],
)
ContainersPaused: int | None = Field(
- None, description='Number of containers with status `"paused"`.\n', example=1
+ None, description='Number of containers with status `"paused"`.\n', examples=[1]
)
ContainersStopped: int | None = Field(
- None, description='Number of containers with status `"stopped"`.\n', example=10
+ None,
+ description='Number of containers with status `"stopped"`.\n',
+ examples=[10],
)
Images: int | None = Field(
None,
description="Total number of images on the host.\n\nBoth _tagged_ and _untagged_ (dangling) images are counted.\n",
- example=508,
+ examples=[508],
)
Driver: str | None = Field(
- None, description="Name of the storage driver in use.", example="overlay2"
+ None, description="Name of the storage driver in use.", examples=["overlay2"]
)
DriverStatus: list[list[str]] | None = Field(
None,
description='Information specific to the storage driver, provided as\n"label" / "value" pairs.\n\nThis information is provided by the storage driver, and formatted\nin a way consistent with the output of `docker info` on the command\nline.\n\n\n\n> **Note**: The information returned in this field, including the\n> formatting of values and labels, should not be considered stable,\n> and may change without notice.\n',
- example=[
- ["Backing Filesystem", "extfs"],
- ["Supports d_type", "true"],
- ["Native Overlay Diff", "true"],
+ examples=[
+ [
+ ["Backing Filesystem", "extfs"],
+ ["Supports d_type", "true"],
+ ["Native Overlay Diff", "true"],
+ ]
],
)
DockerRootDir: str | None = Field(
None,
description="Root directory of persistent Docker state.\n\nDefaults to `/var/lib/docker` on Linux, and `C:\\ProgramData\\docker`\non Windows.\n",
- example="/var/lib/docker",
+ examples=["/var/lib/docker"],
)
Plugins: PluginsInfo | None = None
MemoryLimit: bool | None = Field(
None,
description="Indicates if the host has memory limit support enabled.",
- example=True,
+ examples=[True],
)
SwapLimit: bool | None = Field(
None,
description="Indicates if the host has memory swap limit support enabled.",
- example=True,
+ examples=[True],
)
KernelMemory: bool | None = Field(
None,
description="Indicates if the host has kernel memory limit support enabled.\n\n
\n\n> **Deprecated**: This field is deprecated as the kernel 5.4 deprecated\n> `kmem.limit_in_bytes`.\n",
- example=True,
+ examples=[True],
)
KernelMemoryTCP: bool | None = Field(
None,
description="Indicates if the host has kernel memory TCP limit support enabled.\n\nKernel memory TCP limits are not supported when using cgroups v2, which\ndoes not support the corresponding `memory.kmem.tcp.limit_in_bytes` cgroup.\n",
- example=True,
+ examples=[True],
)
CpuCfsPeriod: bool | None = Field(
None,
description="Indicates if CPU CFS(Completely Fair Scheduler) period is supported by\nthe host.\n",
- example=True,
+ examples=[True],
)
CpuCfsQuota: bool | None = Field(
None,
description="Indicates if CPU CFS(Completely Fair Scheduler) quota is supported by\nthe host.\n",
- example=True,
+ examples=[True],
)
CPUShares: bool | None = Field(
None,
description="Indicates if CPU Shares limiting is supported by the host.\n",
- example=True,
+ examples=[True],
)
CPUSet: bool | None = Field(
None,
description="Indicates if CPUsets (cpuset.cpus, cpuset.mems) are supported by the host.\n\nSee [cpuset(7)](https://www.kernel.org/doc/Documentation/cgroup-v1/cpusets.txt)\n",
- example=True,
+ examples=[True],
)
PidsLimit: bool | None = Field(
None,
description="Indicates if the host kernel has PID limit support enabled.",
- example=True,
+ examples=[True],
)
OomKillDisable: bool | None = Field(
None, description="Indicates if OOM killer disable is supported on the host."
)
IPv4Forwarding: bool | None = Field(
- None, description="Indicates IPv4 forwarding is enabled.", example=True
+ None, description="Indicates IPv4 forwarding is enabled.", examples=[True]
)
BridgeNfIptables: bool | None = Field(
None,
description="Indicates if `bridge-nf-call-iptables` is available on the host.",
- example=True,
+ examples=[True],
)
BridgeNfIp6tables: bool | None = Field(
None,
description="Indicates if `bridge-nf-call-ip6tables` is available on the host.",
- example=True,
+ examples=[True],
)
Debug: bool | None = Field(
None,
description="Indicates if the daemon is running in debug-mode / with debug-level\nlogging enabled.\n",
- example=True,
+ examples=[True],
)
NFd: int | None = Field(
None,
description="The total number of file Descriptors in use by the daemon process.\n\nThis information is only returned if debug-mode is enabled.\n",
- example=64,
+ examples=[64],
)
NGoroutines: int | None = Field(
None,
description="The number of goroutines that currently exist.\n\nThis information is only returned if debug-mode is enabled.\n",
- example=174,
+ examples=[174],
)
SystemTime: str | None = Field(
None,
description="Current system-time in [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt)\nformat with nano-seconds.\n",
- example="2017-08-08T20:28:29.06202363Z",
+ examples=["2017-08-08T20:28:29.06202363Z"],
)
LoggingDriver: str | None = Field(
None, description="The logging driver to use as a default for new containers.\n"
)
- CgroupDriver: CgroupDriver | None = Field(
+ CgroupDriver_: CgroupDriver | None = Field(
CgroupDriver.cgroupfs,
+ alias="CgroupDriver",
description="The driver to use for managing cgroups.\n",
- example="cgroupfs",
+ examples=["cgroupfs"],
)
- CgroupVersion: CgroupVersion | None = Field(
- CgroupVersion.field_1, description="The version of the cgroup.\n", example="1"
+ CgroupVersion_: CgroupVersion | None = Field(
+ CgroupVersion.field_1,
+ alias="CgroupVersion",
+ description="The version of the cgroup.\n",
+ examples=["1"],
)
NEventsListener: int | None = Field(
- None, description="Number of event listeners subscribed.", example=30
+ None, description="Number of event listeners subscribed.", examples=[30]
)
KernelVersion: str | None = Field(
None,
description='Kernel version of the host.\n\nOn Linux, this information obtained from `uname`. On Windows this\ninformation is queried from the HKEY_LOCAL_MACHINE\\\\SOFTWARE\\\\Microsoft\\\\Windows NT\\\\CurrentVersion\\\\\nregistry value, for example _"10.0 14393 (14393.1198.amd64fre.rs1_release_sec.170427-1353)"_.\n',
- example="4.9.38-moby",
+ examples=["4.9.38-moby"],
)
OperatingSystem: str | None = Field(
None,
description='Name of the host\'s operating system, for example: "Ubuntu 16.04.2 LTS"\nor "Windows Server 2016 Datacenter"\n',
- example="Alpine Linux v3.5",
+ examples=["Alpine Linux v3.5"],
)
OSVersion: str | None = Field(
None,
description="Version of the host's operating system\n\n
\n\n> **Note**: The information returned in this field, including its\n> very existence, and the formatting of values, should not be considered\n> stable, and may change without notice.\n",
- example="16.04",
+ examples=["16.04"],
)
OSType: str | None = Field(
None,
description='Generic type of the operating system of the host, as returned by the\nGo runtime (`GOOS`).\n\nCurrently returned values are "linux" and "windows". A full list of\npossible values can be found in the [Go documentation](https://golang.org/doc/install/source#environment).\n',
- example="linux",
+ examples=["linux"],
)
Architecture: str | None = Field(
None,
description="Hardware architecture of the host, as returned by the Go runtime\n(`GOARCH`).\n\nA full list of possible values can be found in the [Go documentation](https://golang.org/doc/install/source#environment).\n",
- example="x86_64",
+ examples=["x86_64"],
)
NCPU: int | None = Field(
None,
description="The number of logical CPUs usable by the daemon.\n\nThe number of available CPUs is checked by querying the operating\nsystem when the daemon starts. Changes to operating system CPU\nallocation after the daemon is started are not reflected.\n",
- example=4,
+ examples=[4],
)
MemTotal: int | None = Field(
None,
description="Total amount of physical memory available on the host, in bytes.\n",
- example=2095882240,
+ examples=[2095882240],
)
IndexServerAddress: str | None = Field(
"https://index.docker.io/v1/",
description="Address / URL of the index server that is used for image search,\nand as a default for user authentication for Docker Hub and Docker Cloud.\n",
- example="https://index.docker.io/v1/",
+ examples=["https://index.docker.io/v1/"],
)
RegistryConfig: RegistryServiceConfig | None = None
- GenericResources: GenericResources | None = None
+ GenericResources_: GenericResources | None = Field(None, alias="GenericResources")
HttpProxy: str | None = Field(
None,
description="HTTP-proxy configured for the daemon. This value is obtained from the\n[`HTTP_PROXY`](https://www.gnu.org/software/wget/manual/html_node/Proxies.html) environment variable.\nCredentials ([user info component](https://tools.ietf.org/html/rfc3986#section-3.2.1)) in the proxy URL\nare masked in the API response.\n\nContainers do not automatically inherit this configuration.\n",
- example="http://xxxxx:xxxxx@proxy.corp.example.com:8080",
+ examples=["http://xxxxx:xxxxx@proxy.corp.example.com:8080"],
)
HttpsProxy: str | None = Field(
None,
description="HTTPS-proxy configured for the daemon. This value is obtained from the\n[`HTTPS_PROXY`](https://www.gnu.org/software/wget/manual/html_node/Proxies.html) environment variable.\nCredentials ([user info component](https://tools.ietf.org/html/rfc3986#section-3.2.1)) in the proxy URL\nare masked in the API response.\n\nContainers do not automatically inherit this configuration.\n",
- example="https://xxxxx:xxxxx@proxy.corp.example.com:4443",
+ examples=["https://xxxxx:xxxxx@proxy.corp.example.com:4443"],
)
NoProxy: str | None = Field(
None,
description="Comma-separated list of domain extensions for which no proxy should be\nused. This value is obtained from the [`NO_PROXY`](https://www.gnu.org/software/wget/manual/html_node/Proxies.html)\nenvironment variable.\n\nContainers do not automatically inherit this configuration.\n",
- example="*.local, 169.254/16",
+ examples=["*.local, 169.254/16"],
)
Name: str | None = Field(
- None, description="Hostname of the host.", example="node5.corp.example.com"
+ None, description="Hostname of the host.", examples=["node5.corp.example.com"]
)
Labels: list[str] | None = Field(
None,
description="User-defined labels (key/value metadata) as set on the daemon.\n\n
\n\n> **Note**: When part of a Swarm, nodes can both have _daemon_ labels,\n> set through the daemon configuration, and _node_ labels, set from a\n> manager node in the Swarm. Node labels are not included in this\n> field. Node labels can be retrieved using the `/nodes/(id)` endpoint\n> on a manager node in the Swarm.\n",
- example=["storage=ssd", "production"],
+ examples=[["storage=ssd", "production"]],
)
ExperimentalBuild: bool | None = Field(
None,
description="Indicates if experimental features are enabled on the daemon.\n",
- example=True,
+ examples=[True],
)
ServerVersion: str | None = Field(
None,
description="Version string of the daemon.\n\n> **Note**: the [standalone Swarm API](/swarm/swarm-api/)\n> returns the Swarm version instead of the daemon version, for example\n> `swarm/1.2.8`.\n",
- example="17.06.0-ce",
+ examples=["17.06.0-ce"],
)
ClusterStore: str | None = Field(
None,
description="URL of the distributed storage backend.\n\n\nThe storage backend is used for multihost networking (to store\nnetwork and endpoint information) and by the node discovery mechanism.\n\n
\n\n> **Deprecated**: This field is only propagated when using standalone Swarm\n> mode, and overlay networking using an external k/v store. Overlay\n> networks with Swarm mode enabled use the built-in raft store, and\n> this field will be empty.\n",
- example="consul://consul.corp.example.com:8600/some/path",
+ examples=["consul://consul.corp.example.com:8600/some/path"],
)
ClusterAdvertise: str | None = Field(
None,
description="The network endpoint that the Engine advertises for the purpose of\nnode discovery. ClusterAdvertise is a `host:port` combination on which\nthe daemon is reachable by other hosts.\n\n
\n\n> **Deprecated**: This field is only propagated when using standalone Swarm\n> mode, and overlay networking using an external k/v store. Overlay\n> networks with Swarm mode enabled use the built-in raft store, and\n> this field will be empty.\n",
- example="node5.corp.example.com:8000",
+ examples=["node5.corp.example.com:8000"],
)
Runtimes: dict[str, Runtime] | None = Field(
{"runc": {"path": "runc"}},
description='List of [OCI compliant](https://github.com/opencontainers/runtime-spec)\nruntimes configured on the daemon. Keys hold the "name" used to\nreference the runtime.\n\nThe Docker daemon relies on an OCI compliant runtime (invoked via the\n`containerd` daemon) as its interface to the Linux kernel namespaces,\ncgroups, and SELinux.\n\nThe default runtime is `runc`, and automatically configured. Additional\nruntimes can be configured by the user and will be listed here.\n',
- example={
- "runc": {"path": "runc"},
- "runc-master": {"path": "/go/bin/runc"},
- "custom": {
- "path": "/usr/local/bin/my-oci-runtime",
- "runtimeArgs": ["--debug", "--systemd-cgroup=false"],
- },
- },
+ examples=[
+ {
+ "runc": {"path": "runc"},
+ "runc-master": {"path": "/go/bin/runc"},
+ "custom": {
+ "path": "/usr/local/bin/my-oci-runtime",
+ "runtimeArgs": ["--debug", "--systemd-cgroup=false"],
+ },
+ }
+ ],
)
DefaultRuntime: str | None = Field(
"runc",
description="Name of the default OCI runtime that is used when starting containers.\n\nThe default can be overridden per-container at create time.\n",
- example="runc",
+ examples=["runc"],
)
Swarm: SwarmInfo | None = None
LiveRestoreEnabled: bool | None = Field(
False,
description="Indicates if live restore is enabled.\n\nIf enabled, containers are kept running when the daemon is shutdown\nor upon daemon start if running containers are detected.\n",
- example=False,
+ examples=[False],
)
Isolation: Isolation2 | None = Field(
Isolation2.default,
@@ -3964,7 +4125,7 @@ class SystemInfo(BaseModel):
InitBinary: str | None = Field(
None,
description="Name and, optional, path of the `docker-init` binary.\n\nIf the path is omitted, the daemon searches the host's `$PATH` for the\nbinary and uses the first result.\n",
- example="docker-init",
+ examples=["docker-init"],
)
ContainerdCommit: Commit | None = None
RuncCommit: Commit | None = None
@@ -3972,18 +4133,20 @@ class SystemInfo(BaseModel):
SecurityOptions: list[str] | None = Field(
None,
description="List of security features that are enabled on the daemon, such as\napparmor, seccomp, SELinux, user-namespaces (userns), and rootless.\n\nAdditional configuration options for each security feature may\nbe present, and are included as a comma-separated list of key/value\npairs.\n",
- example=[
- "name=apparmor",
- "name=seccomp,profile=default",
- "name=selinux",
- "name=userns",
- "name=rootless",
+ examples=[
+ [
+ "name=apparmor",
+ "name=seccomp,profile=default",
+ "name=selinux",
+ "name=userns",
+ "name=rootless",
+ ]
],
)
ProductLicense: str | None = Field(
None,
description="Reports a summary of the product license on the daemon.\n\nIf a commercial license has been applied to the daemon, information\nsuch as number of nodes, and expiration are included.\n",
- example="Community Engine",
+ examples=["Community Engine"],
)
DefaultAddressPools: list[DefaultAddressPool] | None = Field(
None,
@@ -3992,9 +4155,11 @@ class SystemInfo(BaseModel):
Warnings: list[str] | None = Field(
None,
description="List of warnings / informational messages about missing features, or\nissues related to the daemon configuration.\n\nThese messages can be printed by the client as information to the user.\n",
- example=[
- "WARNING: No memory limit support",
- "WARNING: bridge-nf-call-iptables is disabled",
- "WARNING: bridge-nf-call-ip6tables is disabled",
+ examples=[
+ [
+ "WARNING: No memory limit support",
+ "WARNING: bridge-nf-call-iptables is disabled",
+ "WARNING: bridge-nf-call-ip6tables is disabled",
+ ]
],
)
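
Aside (not part of the patch): a minimal sketch of the pydantic-2 Field conventions applied throughout the generated docker_rest_api.py models above: `examples` takes a list of values where v1 used a scalar `example`, and fields whose names collide with their annotation types are renamed with a trailing underscore while an `alias` keeps the original wire name. `NodeSketch` and the `dict` stand-in type are illustrative assumptions, not code from this change.

from pydantic import BaseModel, Field

class NodeSketch(BaseModel):
    # v1: Field(None, example="bf3067039e47")  ->  v2: examples is a list
    Hostname: str | None = Field(None, examples=["bf3067039e47"])
    # renamed to avoid clashing with a model named `Platform`; alias keeps the wire name
    Platform_: dict | None = Field(None, alias="Platform")

# aliases are used for input by default, so payloads with the original key still validate
node = NodeSketch.model_validate({"Hostname": "bf3067039e47", "Platform": {}})
assert node.Platform_ == {}
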
diff --git a/packages/models-library/src/models_library/generics.py b/packages/models-library/src/models_library/generics.py
index 50d6f3398100..753510d088b5 100644
--- a/packages/models-library/src/models_library/generics.py
+++ b/packages/models-library/src/models_library/generics.py
@@ -1,66 +1,66 @@
from collections.abc import ItemsView, Iterable, Iterator, KeysView, ValuesView
from typing import Any, Generic, TypeVar
-from pydantic.generics import GenericModel
+from pydantic import BaseModel, RootModel
DictKey = TypeVar("DictKey")
DictValue = TypeVar("DictValue")
-class DictModel(GenericModel, Generic[DictKey, DictValue]):
- __root__: dict[DictKey, DictValue]
+class DictModel(RootModel[dict[DictKey, DictValue]], Generic[DictKey, DictValue]):
+ root: dict[DictKey, DictValue]
def __getitem__(self, k: DictKey) -> DictValue:
- return self.__root__.__getitem__(k)
+ return self.root.__getitem__(k)
def __setitem__(self, k: DictKey, v: DictValue) -> None:
- self.__root__.__setitem__(k, v)
+ self.root.__setitem__(k, v)
def items(self) -> ItemsView[DictKey, DictValue]:
- return self.__root__.items()
+ return self.root.items()
def keys(self) -> KeysView[DictKey]:
- return self.__root__.keys()
+ return self.root.keys()
def values(self) -> ValuesView[DictValue]:
- return self.__root__.values()
+ return self.root.values()
def update(self, *s: Iterable[tuple[DictKey, DictValue]]) -> None:
- return self.__root__.update(*s)
+ return self.root.update(*s)
def __iter__(self) -> Iterator[DictKey]: # type: ignore
- return self.__root__.__iter__()
+ return self.root.__iter__()
def get(self, key: DictKey, default: DictValue | None = None):
- return self.__root__.get(key, default)
+ return self.root.get(key, default)
def setdefault(self, key: DictKey, default: DictValue):
- return self.__root__.setdefault(key, default)
+ return self.root.setdefault(key, default)
def __len__(self) -> int:
- return self.__root__.__len__()
+ return self.root.__len__()
DataT = TypeVar("DataT")
-class ListModel(GenericModel, Generic[DataT]):
- __root__: list[DataT]
+class ListModel(RootModel[list[DataT]], Generic[DataT]):
+ root: list[DataT]
def __iter__(self):
- return iter(self.__root__)
+ return iter(self.root)
def __getitem__(self, item):
- return self.__root__[item]
+ return self.root[item]
def __len__(self):
- return len(self.__root__)
+ return len(self.root)
-class Envelope(GenericModel, Generic[DataT]):
+class Envelope(BaseModel, Generic[DataT]):
data: DataT | None = None
error: Any | None = None
@classmethod
def from_data(cls, obj: Any) -> "Envelope":
- return cls.parse_obj({"data": obj})
+ return cls.model_validate({"data": obj})
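
Aside (not part of the patch): illustrative usage of the migrated generics, showing that the RootModel-based containers keep their dict-like and envelope behaviour under pydantic 2; the concrete values are made up.

from models_library.generics import DictModel, Envelope

prices = DictModel[str, float].model_validate({"cpu": 0.25, "ram": 0.5})
assert prices["cpu"] == 0.25                  # __getitem__ proxies to .root
assert list(prices.keys()) == ["cpu", "ram"]  # keys()/values()/items() proxy as well

envelope = Envelope[float].from_data(42.0)
assert envelope.data == 42.0 and envelope.error is None
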
diff --git a/packages/models-library/src/models_library/groups.py b/packages/models-library/src/models_library/groups.py
index 3be5d1663c83..488776b6d8ea 100644
--- a/packages/models-library/src/models_library/groups.py
+++ b/packages/models-library/src/models_library/groups.py
@@ -1,7 +1,7 @@
import enum
-from typing import Any, ClassVar, Final
+from typing import Final
-from pydantic import BaseModel, Field, validator
+from pydantic import BaseModel, ConfigDict, Field, field_validator
from pydantic.types import PositiveInt
from .utils.common_validators import create_enums_pre_validator
@@ -28,16 +28,15 @@ class Group(BaseModel):
group_type: GroupTypeInModel = Field(..., alias="type")
thumbnail: str | None
- _from_equivalent_enums = validator("group_type", allow_reuse=True, pre=True)(
+ _from_equivalent_enums = field_validator("group_type", mode="before")(
create_enums_pre_validator(GroupTypeInModel)
)
class GroupAtDB(Group):
- class Config:
- orm_mode = True
-
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ from_attributes=True,
+ json_schema_extra={
"example": {
"gid": 218,
"name": "Friends group",
@@ -45,4 +44,5 @@ class Config:
"type": "standard",
"thumbnail": "https://image.flaticon.com/icons/png/512/23/23374.png",
}
- }
+ },
+ )
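
Aside (not part of the patch): a minimal sketch of `from_attributes=True`, the pydantic-2 replacement for `orm_mode` used by `GroupAtDB` above; `GroupRow` and `GroupSketch` are hypothetical stand-ins for the real SQLAlchemy row and model.

from dataclasses import dataclass

from pydantic import BaseModel, ConfigDict

@dataclass
class GroupRow:  # stand-in for a database row object
    gid: int
    name: str

class GroupSketch(BaseModel):
    gid: int
    name: str
    model_config = ConfigDict(from_attributes=True)

# attributes of an arbitrary object are read directly, as orm_mode did in pydantic 1
assert GroupSketch.model_validate(GroupRow(gid=218, name="Friends group")).gid == 218
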
diff --git a/packages/models-library/src/models_library/invitations.py b/packages/models-library/src/models_library/invitations.py
index f7f8328b9be6..b1a8e4154e91 100644
--- a/packages/models-library/src/models_library/invitations.py
+++ b/packages/models-library/src/models_library/invitations.py
@@ -1,7 +1,7 @@
from datetime import datetime, timezone
from typing import Final
-from pydantic import BaseModel, EmailStr, Field, PositiveInt, validator
+from pydantic import BaseModel, EmailStr, Field, PositiveInt, field_validator
from .products import ProductName
@@ -35,7 +35,7 @@ class InvitationInputs(BaseModel):
description="If None, it will use INVITATIONS_DEFAULT_PRODUCT",
)
- @validator("issuer", pre=True)
+ @field_validator("issuer", mode="before")
@classmethod
def trim_long_issuers_to_max_length(cls, v):
if v and isinstance(v, str):
@@ -57,7 +57,7 @@ def create_from_inputs(
cls, invitation_inputs: InvitationInputs, default_product: ProductName
) -> "InvitationContent":
- kwargs = invitation_inputs.dict(exclude_none=True)
+ kwargs = invitation_inputs.model_dump(exclude_none=True)
kwargs.setdefault("product", default_product)
return cls(
created=datetime.now(tz=timezone.utc),
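
Aside (not part of the patch): a minimal sketch of the two idioms this file now relies on, `field_validator(..., mode="before")` replacing `validator(..., pre=True)` and `model_dump()` replacing `.dict()`; `IssuerSketch` and the 30-character limit are illustrative only.

from pydantic import BaseModel, field_validator

class IssuerSketch(BaseModel):
    issuer: str

    @field_validator("issuer", mode="before")
    @classmethod
    def _trim_long_issuer(cls, v):
        # runs before type validation, like pre=True in pydantic 1
        return v[:30] if isinstance(v, str) else v

m = IssuerSketch(issuer="x" * 100)
assert len(m.issuer) == 30
assert m.model_dump(exclude_none=True) == {"issuer": "x" * 30}
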
diff --git a/packages/models-library/src/models_library/osparc_variable_identifier.py b/packages/models-library/src/models_library/osparc_variable_identifier.py
index 71e4779b2ad1..a4b0d4e882b4 100644
--- a/packages/models-library/src/models_library/osparc_variable_identifier.py
+++ b/packages/models-library/src/models_library/osparc_variable_identifier.py
@@ -1,30 +1,24 @@
from copy import deepcopy
from typing import Any, TypeVar
-from pydantic import BaseModel, Field
-from pydantic.errors import PydanticErrorMixin
+from models_library.basic_types import ConstrainedStr
+from models_library.errors_classes import OsparcErrorMixin
+from pydantic import BaseModel
from .utils.string_substitution import OSPARC_IDENTIFIER_PREFIX
T = TypeVar("T")
-class OsparcVariableIdentifier(BaseModel):
+class OsparcVariableIdentifier(ConstrainedStr):
# NOTE: To allow parametrized value, set the type to Union[OsparcVariableIdentifier, ...]
# NOTE: When dealing with str types, to avoid unexpected behavior, the following
# order is suggested `OsparcVariableIdentifier | str`
- __root__: str = Field(
- ...,
- # NOTE: in below regex `{`` and `}` are respectively escaped with `{{` and `}}`
- regex=rf"^\${{1,2}}(?:\{{)?{OSPARC_IDENTIFIER_PREFIX}[A-Za-z0-9_]+(?:\}})?(:-.+)?$",
+ # NOTE: in below regex `{`` and `}` are respectively escaped with `{{` and `}}`
+ pattern = (
+ rf"^\${{1,2}}(?:\{{)?{OSPARC_IDENTIFIER_PREFIX}[A-Za-z0-9_]+(?:\}})?(:-.+)?$"
)
- def __hash__(self):
- return hash(str(self.__root__))
-
- def __eq__(self, other):
- return self.__root__ == other.__root__
-
def _get_without_template_markers(self) -> str:
# $VAR
# ${VAR}
@@ -32,7 +26,7 @@ def _get_without_template_markers(self) -> str:
# ${VAR:-default}
# ${VAR:-{}}
return (
- self.__root__.removeprefix("$$")
+ self.removeprefix("$$")
.removeprefix("$")
.removeprefix("{")
.removesuffix("}")
@@ -48,7 +42,7 @@ def default_value(self) -> str | None:
return parts[1] if len(parts) > 1 else None
-class UnresolvedOsparcVariableIdentifierError(PydanticErrorMixin, TypeError):
+class UnresolvedOsparcVariableIdentifierError(OsparcErrorMixin, TypeError):
msg_template = "Provided argument is unresolved: value={value}"
diff --git a/packages/models-library/src/models_library/payments.py b/packages/models-library/src/models_library/payments.py
index 7a4ec846575e..ff704ab7d2e6 100644
--- a/packages/models-library/src/models_library/payments.py
+++ b/packages/models-library/src/models_library/payments.py
@@ -1,7 +1,7 @@
from decimal import Decimal
-from typing import Any, ClassVar, TypeAlias
+from typing import TypeAlias
-from pydantic import BaseModel, Field, validator
+from pydantic import BaseModel, ConfigDict, Field, field_validator
from .emails import LowerCaseEmailStr
from .products import StripePriceID, StripeTaxRateID
@@ -19,15 +19,8 @@ class UserInvoiceAddress(BaseModel):
description="Currently validated in webserver via pycountry library. Two letter country code alpha_2 expected.",
)
- @validator("*", pre=True)
- @classmethod
- def parse_empty_string_as_null(cls, v):
- if isinstance(v, str) and len(v.strip()) == 0:
- return None
- return v
-
- class Config:
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"examples": [
{
"line1": None,
@@ -38,6 +31,14 @@ class Config:
},
]
}
+ )
+
+ @field_validator("*", mode="before")
+ @classmethod
+ def parse_empty_string_as_null(cls, v):
+ if isinstance(v, str) and len(v.strip()) == 0:
+ return None
+ return v
class InvoiceDataGet(BaseModel):
@@ -48,18 +49,17 @@ class InvoiceDataGet(BaseModel):
user_display_name: str
user_email: LowerCaseEmailStr
- class Config:
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"examples": [
{
- "credit_amount": Decimal(15.5),
+ "credit_amount": Decimal(15.5), # type: ignore[dict-item]
"stripe_price_id": "stripe-price-id",
"stripe_tax_rate_id": "stripe-tax-rate-id",
- "user_invoice_address": UserInvoiceAddress.Config.schema_extra[
- "examples"
- ][0],
+ "user_invoice_address": UserInvoiceAddress.model_config["json_schema_extra"]["examples"][0], # type: ignore [index]
"user_display_name": "My Name",
- "user_email": LowerCaseEmailStr("email@example.itis"),
+ "user_email": "email@example.itis",
},
]
}
+ )
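
Aside (not part of the patch): a sketch of the `json_schema_extra` convention used above, where examples declared in one model's `model_config` stay reachable for reuse by other models; `AddressSketch` is hypothetical.

from pydantic import BaseModel, ConfigDict

class AddressSketch(BaseModel):
    country: str | None = None

    model_config = ConfigDict(
        json_schema_extra={"examples": [{"country": "CH"}]},
    )

# model_config is a plain dict at runtime, so declared examples can be reused elsewhere
example = AddressSketch.model_config["json_schema_extra"]["examples"][0]
assert AddressSketch.model_validate(example).country == "CH"
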
diff --git a/packages/models-library/src/models_library/products.py b/packages/models-library/src/models_library/products.py
index c38281d9f6a1..51c44a83d478 100644
--- a/packages/models-library/src/models_library/products.py
+++ b/packages/models-library/src/models_library/products.py
@@ -1,7 +1,7 @@
from decimal import Decimal
-from typing import Any, ClassVar, TypeAlias
+from typing import TypeAlias
-from pydantic import BaseModel, Field
+from pydantic import BaseModel, ConfigDict, Field
ProductName: TypeAlias = str
StripePriceID: TypeAlias = str
@@ -12,20 +12,20 @@ class CreditResultGet(BaseModel):
product_name: ProductName
credit_amount: Decimal = Field(..., description="")
- class Config:
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"examples": [
- {"product_name": "s4l", "credit_amount": Decimal(15.5)},
+ {"product_name": "s4l", "credit_amount": Decimal(15.5)}, # type: ignore[dict-item]
]
}
+ )
class ProductStripeInfoGet(BaseModel):
stripe_price_id: StripePriceID
stripe_tax_rate_id: StripeTaxRateID
-
- class Config:
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"examples": [
{
"stripe_price_id": "stripe-price-id",
@@ -33,3 +33,4 @@ class Config:
},
]
}
+ )
diff --git a/packages/models-library/src/models_library/progress_bar.py b/packages/models-library/src/models_library/progress_bar.py
index 788331b103e8..da2829b0c94b 100644
--- a/packages/models-library/src/models_library/progress_bar.py
+++ b/packages/models-library/src/models_library/progress_bar.py
@@ -1,6 +1,6 @@
-from typing import Any, ClassVar, Literal, TypeAlias
+from typing import Literal, TypeAlias
-from pydantic import BaseModel
+from pydantic import BaseModel, ConfigDict
from .basic_types import IDStr
@@ -15,8 +15,8 @@ class ProgressStructuredMessage(BaseModel):
unit: str | None
sub: "ProgressStructuredMessage | None"
- class Config:
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"examples": [
{
"description": "some description",
@@ -42,6 +42,7 @@ class Config:
},
]
}
+ )
UNITLESS = None
@@ -77,9 +78,9 @@ def composed_message(self) -> str:
return msg
- class Config:
- frozen = True
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ frozen=True,
+ json_schema_extra={
"examples": [
# typical percent progress (no units)
{
@@ -96,9 +97,8 @@ class Config:
{
"actual_value": 0.3,
"total": 1.0,
- "message": ProgressStructuredMessage.Config.schema_extra[
- "examples"
- ][2],
+ "message": ProgressStructuredMessage.model_config["json_schema_extra"]["examples"][2], # type: ignore [index]
},
]
- }
+ },
+ )
diff --git a/packages/models-library/src/models_library/projects.py b/packages/models-library/src/models_library/projects.py
index 6c2036caa5a5..440bfe915bb4 100644
--- a/packages/models-library/src/models_library/projects.py
+++ b/packages/models-library/src/models_library/projects.py
@@ -1,18 +1,17 @@
"""
Models a study's project document
"""
-import re
from copy import deepcopy
from datetime import datetime
from enum import Enum
from typing import Any, Final, TypeAlias
from uuid import UUID
+from models_library.basic_types import ConstrainedStr
from models_library.workspaces import WorkspaceID
-from pydantic import BaseModel, ConstrainedStr, Extra, Field, validator
+from pydantic import BaseModel, ConfigDict, Field, HttpUrl, field_validator
from .basic_regex import DATE_RE, UUID_RE_BASE
-from .basic_types import HttpUrlWithCustomMinLength
from .emails import LowerCaseEmailStr
from .projects_access import AccessRights, GroupIDStr
from .projects_nodes import Node
@@ -32,17 +31,11 @@
class ProjectIDStr(ConstrainedStr):
- regex = re.compile(UUID_RE_BASE)
-
- class Config:
- frozen = True
+ pattern = UUID_RE_BASE
class DateTimeStr(ConstrainedStr):
- regex = re.compile(DATE_RE)
-
- class Config:
- frozen = True
+ pattern = DATE_RE
@classmethod
def to_datetime(cls, s: "DateTimeStr"):
@@ -73,7 +66,7 @@ class BaseProjectModel(BaseModel):
description="longer one-line description about the project",
examples=["Dabbling in temporal transitions ..."],
)
- thumbnail: HttpUrlWithCustomMinLength | None = Field(
+ thumbnail: HttpUrl | None = Field(
...,
description="url of the project thumbnail",
examples=["https://placeimg.com/171/96/tech/grayscale/?0.jpg"],
@@ -86,11 +79,11 @@ class BaseProjectModel(BaseModel):
workbench: NodesDict = Field(..., description="Project's pipeline")
# validators
- _empty_thumbnail_is_none = validator("thumbnail", allow_reuse=True, pre=True)(
+ _empty_thumbnail_is_none = field_validator("thumbnail", mode="before")(
empty_str_to_none_pre_validator
)
- _none_description_is_empty = validator("description", allow_reuse=True, pre=True)(
+ _none_description_is_empty = field_validator("description", mode="before")(
none_to_empty_str_pre_validator
)
@@ -108,17 +101,16 @@ class ProjectAtDB(BaseProjectModel):
False, description="Defines if a study is available publicly"
)
- @validator("project_type", pre=True)
+ @field_validator("project_type", mode="before")
@classmethod
def convert_sql_alchemy_enum(cls, v):
if isinstance(v, Enum):
return v.value
return v
- class Config:
- orm_mode = True
- use_enum_values = True
- allow_population_by_field_name = True
+ model_config = ConfigDict(
+ from_attributes=True, use_enum_values=True, populate_by_name=True
+ )
class Project(BaseProjectModel):
@@ -180,18 +172,16 @@ class Project(BaseProjectModel):
alias="workspaceId",
)
- class Config:
- description = "Document that stores metadata, pipeline and UI setup of a study"
- title = "osparc-simcore project"
- extra = Extra.forbid
-
- @staticmethod
- def schema_extra(schema: dict, _model: "Project"):
- # pylint: disable=unsubscriptable-object
-
- # Patch to allow jsonschema nullable
- # SEE https://github.com/samuelcolvin/pydantic/issues/990#issuecomment-645961530
- state_pydantic_schema = deepcopy(schema["properties"]["state"])
- schema["properties"]["state"] = {
- "anyOf": [{"type": "null"}, state_pydantic_schema]
- }
+ def _patch_json_schema_extra(self, schema: dict) -> None:
+ # Patch to allow jsonschema nullable
+ # SEE https://github.com/samuelcolvin/pydantic/issues/990#issuecomment-645961530
+ state_pydantic_schema = deepcopy(schema["properties"]["state"])
+ schema["properties"]["state"] = {
+ "anyOf": [{"type": "null"}, state_pydantic_schema]
+ }
+
+ model_config = ConfigDict(
+ title="osparc-simcore project",
+ extra="forbid",
+ json_schema_extra=_patch_json_schema_extra, # type: ignore[typeddict-item]
+ )
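
Note: in pydantic v2 the json_schema_extra entry of model_config also accepts a callable that mutates the generated schema in place; that is the mechanism the _patch_json_schema_extra hook above relies on to keep "state" nullable in the exported schema. A minimal sketch with an illustrative model (not the repo's Project):

    from copy import deepcopy
    from pydantic import BaseModel, ConfigDict

    def _make_state_nullable(schema: dict) -> None:
        # wrap the generated "state" property in anyOf [null, original]
        original = deepcopy(schema["properties"]["state"])
        schema["properties"]["state"] = {"anyOf": [{"type": "null"}, original]}

    class Example(BaseModel):
        state: str
        model_config = ConfigDict(json_schema_extra=_make_state_nullable)

    print(Example.model_json_schema()["properties"]["state"])
    # -> {'anyOf': [{'type': 'null'}, {'title': 'State', 'type': 'string'}]}
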
diff --git a/packages/models-library/src/models_library/projects_access.py b/packages/models-library/src/models_library/projects_access.py
index 1b800c6b0aed..29ca6c9f5921 100644
--- a/packages/models-library/src/models_library/projects_access.py
+++ b/packages/models-library/src/models_library/projects_access.py
@@ -3,9 +3,8 @@
"""
from enum import Enum
-from typing import Any, ClassVar
-from pydantic import BaseModel, Extra, Field
+from pydantic import BaseModel, ConfigDict, Field
from pydantic.types import PositiveInt
from .basic_types import IDStr
@@ -26,33 +25,22 @@ class AccessRights(BaseModel):
read: bool = Field(..., description="has read access")
write: bool = Field(..., description="has write access")
delete: bool = Field(..., description="has deletion rights")
-
- class Config:
- extra = Extra.forbid
-
-
-class PositiveIntWithExclusiveMinimumRemoved(PositiveInt):
- # As we are trying to match this Pydantic model to a historical json schema "project-v0.0.1" we need to remove this
- # Pydantic does not support exclusiveMinimum boolean https://github.com/pydantic/pydantic/issues/4108
- @classmethod
- def __modify_schema__(cls, field_schema):
- field_schema.pop("exclusiveMinimum", None)
+ model_config = ConfigDict(extra="forbid")
class Owner(BaseModel):
- user_id: PositiveIntWithExclusiveMinimumRemoved = Field(
- ..., description="Owner's user id"
- )
+ user_id: PositiveInt = Field(..., description="Owner's user id")
first_name: FirstNameStr | None = Field(..., description="Owner's first name")
last_name: LastNameStr | None = Field(..., description="Owner's last name")
- class Config:
- extra = Extra.forbid
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ extra="forbid",
+ json_schema_extra={
"examples": [
# NOTE: None and empty string are both defining an undefined value
{"user_id": 1, "first_name": None, "last_name": None},
{"user_id": 2, "first_name": "", "last_name": ""},
{"user_id": 3, "first_name": "John", "last_name": "Smith"},
]
- }
+ },
+ )
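
Note: the PositiveIntWithExclusiveMinimumRemoved workaround dropped above existed because pydantic v1 emitted exclusiveMinimum as a boolean; pydantic v2 targets JSON Schema 2020-12 and already emits it as a number, and __modify_schema__ no longer exists. A quick check of what v2 produces (assumption based on current pydantic behaviour):

    from pydantic import PositiveInt, TypeAdapter

    # v2 emits a numeric exclusiveMinimum, so no schema patching is needed
    print(TypeAdapter(PositiveInt).json_schema())
    # -> {'exclusiveMinimum': 0, 'type': 'integer'}
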
diff --git a/packages/models-library/src/models_library/projects_comments.py b/packages/models-library/src/models_library/projects_comments.py
index 234ec638a4a8..88937d83d78c 100644
--- a/packages/models-library/src/models_library/projects_comments.py
+++ b/packages/models-library/src/models_library/projects_comments.py
@@ -1,7 +1,7 @@
from datetime import datetime
from typing import TypeAlias
-from pydantic import BaseModel, Extra, Field, PositiveInt
+from pydantic import BaseModel, ConfigDict, Field, PositiveInt
from .projects import ProjectID
from .users import UserID
@@ -33,12 +33,8 @@ class _ProjectsCommentsBase(BaseModel):
class ProjectsCommentsDB(_ProjectsCommentsBase):
- class Config:
- extra = Extra.forbid
- validation = False
+ model_config = ConfigDict(extra="forbid")
class ProjectsCommentsAPI(_ProjectsCommentsBase):
- class Config:
- extra = Extra.forbid
- validation = False
+ model_config = ConfigDict(extra="forbid")
diff --git a/packages/models-library/src/models_library/projects_networks.py b/packages/models-library/src/models_library/projects_networks.py
index e0775ccb5d5f..ee255dd80ff8 100644
--- a/packages/models-library/src/models_library/projects_networks.py
+++ b/packages/models-library/src/models_library/projects_networks.py
@@ -1,7 +1,7 @@
import re
-from typing import Any, ClassVar, Final
+from typing import Annotated, Final, TypeAlias
-from pydantic import BaseModel, ConstrainedStr, Field
+from pydantic import BaseModel, ConfigDict, Field, StringConstraints
from .generics import DictModel
from .projects import ProjectID
@@ -12,12 +12,9 @@
PROJECT_NETWORK_PREFIX: Final[str] = "prj-ntwrk"
-class DockerNetworkName(ConstrainedStr):
- regex = SERVICE_NETWORK_RE
+DockerNetworkName: TypeAlias = Annotated[str, StringConstraints(pattern=SERVICE_NETWORK_RE)]
-
-class DockerNetworkAlias(ConstrainedStr):
- regex = SERVICE_NETWORK_RE
+DockerNetworkAlias: TypeAlias = Annotated[str, StringConstraints(pattern=SERVICE_NETWORK_RE)]
class ContainerAliases(DictModel[NodeIDStr, DockerNetworkAlias]):
@@ -25,8 +22,8 @@ class ContainerAliases(DictModel[NodeIDStr, DockerNetworkAlias]):
class NetworksWithAliases(DictModel[DockerNetworkName, ContainerAliases]):
- class Config:
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"examples": [
{
"network_one": {
@@ -36,6 +33,7 @@ class Config:
},
]
}
+ )
class ProjectsNetworks(BaseModel):
@@ -47,10 +45,9 @@ class ProjectsNetworks(BaseModel):
"is given a user defined alias by which it is identified on the network."
),
)
-
- class Config:
- orm_mode = True
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ from_attributes=True,
+ json_schema_extra={
"example": {
"project_uuid": "ec5cdfea-f24e-4aa1-83b8-6dccfdc8cf4d",
"networks_with_aliases": {
@@ -60,4 +57,5 @@ class Config:
}
},
}
- }
+ },
+ )
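
Note: DockerNetworkName and DockerNetworkAlias above become plain Annotated aliases instead of ConstrainedStr subclasses; the constraints live in StringConstraints and validation goes through a TypeAdapter. A small sketch of the general v2 idiom (illustrative alias, not from this diff):

    from typing import Annotated, TypeAlias
    from pydantic import StringConstraints, TypeAdapter

    SlugStr: TypeAlias = Annotated[str, StringConstraints(pattern=r"^[a-z0-9_-]+$")]

    TypeAdapter(SlugStr).validate_python("network_one")   # ok
    # TypeAdapter(SlugStr).validate_python("Bad Name!")   # raises ValidationError
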
diff --git a/packages/models-library/src/models_library/projects_nodes.py b/packages/models-library/src/models_library/projects_nodes.py
index 318f7149ab4e..b074dd4dec6c 100644
--- a/packages/models-library/src/models_library/projects_nodes.py
+++ b/packages/models-library/src/models_library/projects_nodes.py
@@ -3,21 +3,22 @@
"""
from copy import deepcopy
-from typing import Any, ClassVar, TypeAlias, Union
+from typing import Annotated, Any, TypeAlias, Union
from pydantic import (
BaseModel,
- ConstrainedStr,
- Extra,
+ ConfigDict,
Field,
+ HttpUrl,
Json,
StrictBool,
StrictFloat,
StrictInt,
- validator,
+ StringConstraints,
+ field_validator,
)
-from .basic_types import EnvVarKey, HttpUrlWithCustomMinLength, KeyIDStr
+from .basic_types import EnvVarKey, KeyIDStr
from .projects_access import AccessEnum
from .projects_nodes_io import (
DatCoreFileLink,
@@ -58,12 +59,15 @@
InputID: TypeAlias = KeyIDStr
OutputID: TypeAlias = KeyIDStr
-InputsDict: TypeAlias = dict[InputID, InputTypes]
-OutputsDict: TypeAlias = dict[OutputID, OutputTypes]
-
+# union_mode="smart" by default for Pydantic>=2: https://docs.pydantic.dev/latest/concepts/unions/#union-modes
+InputsDict: TypeAlias = dict[
+ InputID, Annotated[InputTypes, Field(union_mode="left_to_right")]
+]
+OutputsDict: TypeAlias = dict[
+ OutputID, Annotated[OutputTypes, Field(union_mode="left_to_right")]
+]
-class UnitStr(ConstrainedStr):
- strip_whitespace = True
+UnitStr: TypeAlias = Annotated[str, StringConstraints(strip_whitespace=True)]
class NodeState(BaseModel):
@@ -85,10 +89,9 @@ class NodeState(BaseModel):
le=1.0,
description="current progress of the task if available (None if not started or not a computational task)",
)
-
- class Config:
- extra = Extra.forbid
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ extra="forbid",
+ json_schema_extra={
"examples": [
{
"modified": True,
@@ -106,7 +109,8 @@ class Config:
"currentStatus": "SUCCESS",
},
]
- }
+ },
+ )
class Node(BaseModel):
@@ -134,7 +138,7 @@ class Node(BaseModel):
description="the node progress value (deprecated in DB, still used for API only)",
deprecated=True,
)
- thumbnail: HttpUrlWithCustomMinLength | None = Field(
+ thumbnail: Annotated[str, HttpUrl] | None = Field(
default=None,
description="url of the latest screenshot of the node",
examples=["https://placeimg.com/171/96/tech/grayscale/?0.jpg"],
@@ -208,7 +212,7 @@ class Node(BaseModel):
),
)
- @validator("thumbnail", pre=True)
+ @field_validator("thumbnail", mode="before")
@classmethod
def convert_empty_str_to_none(cls, v):
if isinstance(v, str) and v == "":
@@ -221,7 +225,7 @@ def convert_old_enum_name(cls, v) -> RunningState:
return RunningState.FAILED
return RunningState(v)
- @validator("state", pre=True)
+ @field_validator("state", mode="before")
@classmethod
def convert_from_enum(cls, v):
if isinstance(v, str):
@@ -230,16 +234,16 @@ def convert_from_enum(cls, v):
return NodeState(currentStatus=running_state_value)
return v
- class Config:
- extra = Extra.forbid
-
+ def _patch_json_schema_extra(self, schema: dict) -> None:
# NOTE: exporting without this trick does not make runHash as nullable.
# It is a Pydantic issue see https://github.com/samuelcolvin/pydantic/issues/1270
- @staticmethod
- def schema_extra(schema, _model: "Node"):
- # SEE https://swagger.io/docs/specification/data-models/data-types/#Null
- for prop_name in ["parent", "runHash"]:
- if prop_name in schema.get("properties", {}):
- prop = deepcopy(schema["properties"][prop_name])
- prop["nullable"] = True
- schema["properties"][prop_name] = prop
+ for prop_name in ["parent", "runHash"]:
+ if prop_name in schema.get("properties", {}):
+ prop = deepcopy(schema["properties"][prop_name])
+ prop["nullable"] = True
+ schema["properties"][prop_name] = prop
+
+ model_config = ConfigDict(
+ extra="forbid",
+ json_schema_extra=_patch_json_schema_extra, # type: ignore[typeddict-item]
+ )
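
Note: the Annotated[..., Field(union_mode="left_to_right")] wrapper above opts the Input/Output unions out of v2's default "smart" union resolution and restores v1's try-members-in-order behaviour. A toy illustration of the difference (types are illustrative, not the repo's InputTypes):

    from typing import Annotated
    from pydantic import BaseModel, Field

    class Smart(BaseModel):
        v: int | str  # smart mode keeps the exact type match: "3" stays a str

    class LeftToRight(BaseModel):
        v: Annotated[int | str, Field(union_mode="left_to_right")]  # int tried first: "3" becomes 3

    print(repr(Smart(v="3").v), repr(LeftToRight(v="3").v))  # -> '3' 3
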
diff --git a/packages/models-library/src/models_library/projects_nodes_io.py b/packages/models-library/src/models_library/projects_nodes_io.py
index b2d884854896..876c2f717449 100644
--- a/packages/models-library/src/models_library/projects_nodes_io.py
+++ b/packages/models-library/src/models_library/projects_nodes_io.py
@@ -8,18 +8,19 @@
import re
from pathlib import Path
-from typing import Any, ClassVar, TypeAlias
+from typing import Annotated, TypeAlias
from uuid import UUID
-from models_library.basic_types import KeyIDStr
+from models_library.basic_types import ConstrainedStr, KeyIDStr
from pydantic import (
AnyUrl,
BaseModel,
- ConstrainedStr,
- Extra,
+ ConfigDict,
Field,
- parse_obj_as,
- validator,
+ StringConstraints,
+ TypeAdapter,
+ ValidationInfo,
+ field_validator,
)
from .basic_regex import (
@@ -31,10 +32,7 @@
NodeID = UUID
-
-class UUIDStr(ConstrainedStr):
- regex: re.Pattern[str] | None = re.compile(UUID_RE)
-
+UUIDStr: TypeAlias = Annotated[str, StringConstraints(pattern=UUID_RE)]
NodeIDStr = UUIDStr
@@ -43,7 +41,7 @@ class UUIDStr(ConstrainedStr):
class SimcoreS3FileID(ConstrainedStr):
- regex: re.Pattern[str] | None = re.compile(SIMCORE_S3_FILE_ID_RE)
+ pattern: re.Pattern[str] | None = re.compile(SIMCORE_S3_FILE_ID_RE)
class SimcoreS3DirectoryID(ConstrainedStr):
@@ -52,7 +50,7 @@ class SimcoreS3DirectoryID(ConstrainedStr):
`{project_id}/{node_id}/simcore-dir-name/`
"""
- regex: re.Pattern[str] | None = re.compile(SIMCORE_S3_DIRECTORY_ID_RE)
+ pattern: re.Pattern[str] | None = re.compile(SIMCORE_S3_DIRECTORY_ID_RE)
@staticmethod
def _get_parent(s3_object: str, *, parent_index: int) -> str:
@@ -72,8 +70,8 @@ def _get_parent(s3_object: str, *, parent_index: int) -> str:
raise ValueError(msg) from err
@classmethod
- def validate(cls, value: str) -> str:
- value = super().validate(value)
+ def _validate(cls, __input_value: str) -> str:
+ value = super()._validate(__input_value)
value = value.rstrip("/")
parent = cls._get_parent(value, parent_index=3)
@@ -86,7 +84,7 @@ def validate(cls, value: str) -> str:
@classmethod
def from_simcore_s3_object(cls, s3_object: str) -> "SimcoreS3DirectoryID":
parent_path: str = cls._get_parent(s3_object, parent_index=4)
- return parse_obj_as(cls, f"{parent_path}/")
+ return TypeAdapter(cls).validate_python(f"{parent_path}/")
class DatCoreFileID(ConstrainedStr):
@@ -108,10 +106,9 @@ class PortLink(BaseModel):
...,
description="The port key in the node given by nodeUuid",
)
-
- class Config:
- extra = Extra.forbid
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ extra="forbid",
+ json_schema_extra={
"examples": [
# minimal
{
@@ -119,25 +116,26 @@ class Config:
"output": "out_2",
}
],
- }
+ },
+ )
class DownloadLink(BaseModel):
"""I/O port type to hold a generic download link to a file (e.g. S3 pre-signed link, etc)"""
- download_link: AnyUrl = Field(..., alias="downloadLink")
+ download_link: Annotated[str, AnyUrl] = Field(..., alias="downloadLink")
label: str | None = Field(default=None, description="Display name")
-
- class Config:
- extra = Extra.forbid
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ extra="forbid",
+ json_schema_extra={
"examples": [
# minimal
{
"downloadLink": "https://fakeimg.pl/250x100/",
}
],
- }
+ },
+ )
## CUSTOM STORAGE SERVICES -----------
@@ -155,8 +153,7 @@ class BaseFileLink(BaseModel):
)
label: str | None = Field(
- default=None,
- description="The real file name",
+ default=None, description="The real file name", validate_default=True
)
e_tag: str | None = Field(
@@ -165,7 +162,7 @@ class BaseFileLink(BaseModel):
alias="eTag",
)
- @validator("store", pre=True)
+ @field_validator("store", mode="before")
@classmethod
def legacy_enforce_str_to_int(cls, v):
# SEE example 'legacy: store as string'
@@ -182,7 +179,7 @@ class SimCoreFileLink(BaseFileLink):
deprecated=True,
)
- @validator("store", always=True)
+ @field_validator("store")
@classmethod
def check_discriminator(cls, v):
"""Used as discriminator to cast to this class"""
@@ -191,16 +188,16 @@ def check_discriminator(cls, v):
raise ValueError(msg)
return 0
- @validator("label", always=True, pre=True)
+ @field_validator("label", mode="before")
@classmethod
- def pre_fill_label_with_filename_ext(cls, v, values):
- if v is None and "path" in values:
- return Path(values["path"]).name
+ def pre_fill_label_with_filename_ext(cls, v, info: ValidationInfo):
+ if v is None and "path" in info.data:
+ return Path(info.data["path"]).name
return v
- class Config:
- extra = Extra.forbid
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ extra="forbid",
+ json_schema_extra={
"examples": [
{
"store": 0,
@@ -225,7 +222,8 @@ class Config:
"path": "94453a6a-c8d4-52b3-a22d-ccbf81f8d636/d4442ca4-23fd-5b6b-ba6d-0b75f711c109/y_1D.txt",
},
],
- }
+ },
+ )
class DatCoreFileLink(BaseFileLink):
@@ -241,7 +239,7 @@ class DatCoreFileLink(BaseFileLink):
description="Unique identifier to access the dataset on datcore (REQUIRED for datcore)",
)
- @validator("store", always=True)
+ @field_validator("store")
@classmethod
def check_discriminator(cls, v):
"""Used as discriminator to cast to this class"""
@@ -251,9 +249,9 @@ def check_discriminator(cls, v):
raise ValueError(msg)
return 1
- class Config:
- extra = Extra.forbid
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ extra="forbid",
+ json_schema_extra={
"examples": [
{
# minimal
@@ -270,7 +268,8 @@ class Config:
"label": "initial_WTstates",
},
],
- }
+ },
+ )
# Bundles all model links to a file vs PortLink
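
Note: two substitutions recur throughout the file above: parse_obj_as becomes TypeAdapter(...).validate_python(...), and the v1 validator values dict becomes ValidationInfo.data. A minimal sketch of the cross-field pattern (illustrative model, mirroring pre_fill_label_with_filename_ext):

    from pathlib import Path
    from pydantic import BaseModel, Field, ValidationInfo, field_validator

    class FileRef(BaseModel):
        path: str
        # validate_default=True makes the validator run even when label is omitted,
        # replacing v1's always=True
        label: str | None = Field(default=None, validate_default=True)

        @field_validator("label", mode="before")
        @classmethod
        def _default_label_from_path(cls, v, info: ValidationInfo):
            # earlier fields are available in info.data (v1 passed them as `values`)
            if v is None and "path" in info.data:
                return Path(info.data["path"]).name
            return v

    print(FileRef(path="a/b/c.txt").label)  # -> c.txt
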
diff --git a/packages/models-library/src/models_library/projects_nodes_ui.py b/packages/models-library/src/models_library/projects_nodes_ui.py
index aa55332ccbae..e14f2b21a284 100644
--- a/packages/models-library/src/models_library/projects_nodes_ui.py
+++ b/packages/models-library/src/models_library/projects_nodes_ui.py
@@ -2,20 +2,18 @@
Models node UI (legacy model, use instead projects.ui.py)
"""
-from pydantic import BaseModel, Extra, Field
-from pydantic.color import Color
+from pydantic import BaseModel, ConfigDict, Field
+from pydantic_extra_types.color import Color
class Position(BaseModel):
- x: int = Field(..., description="The x position", example=["12"])
- y: int = Field(..., description="The y position", example=["15"])
+ x: int = Field(..., description="The x position", examples=[["12"]])
+ y: int = Field(..., description="The y position", examples=[["15"]])
- class Config:
- extra = Extra.forbid
+ model_config = ConfigDict(extra="forbid")
class Marker(BaseModel):
color: Color = Field(...)
- class Config:
- extra = Extra.forbid
+ model_config = ConfigDict(extra="forbid")
diff --git a/packages/models-library/src/models_library/projects_pipeline.py b/packages/models-library/src/models_library/projects_pipeline.py
index 2139d1820438..975d4726b4e4 100644
--- a/packages/models-library/src/models_library/projects_pipeline.py
+++ b/packages/models-library/src/models_library/projects_pipeline.py
@@ -1,9 +1,8 @@
import datetime
-from typing import Any, ClassVar
from uuid import UUID
import arrow
-from pydantic import BaseModel, Field, PositiveInt
+from pydantic import BaseModel, ConfigDict, Field, PositiveInt
from .clusters import ClusterID
from .projects_nodes import NodeState
@@ -58,8 +57,8 @@ class ComputationTask(BaseModel):
description="task last modification timestamp or None if the there is no task",
)
- class Config:
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"examples": [
{
"id": "42838344-03de-4ce2-8d93-589a5dcdfd05",
@@ -89,9 +88,9 @@ class Config:
},
"iteration": None,
"cluster_id": None,
- "started": arrow.utcnow().shift(minutes=-50).datetime,
+ "started": arrow.utcnow().shift(minutes=-50).datetime, # type: ignore[dict-item]
"stopped": None,
- "submitted": arrow.utcnow().shift(hours=-1).datetime,
+ "submitted": arrow.utcnow().shift(hours=-1).datetime, # type: ignore[dict-item]
},
{
"id": "f81d7994-9ccc-4c95-8c32-aa70d6bbb1b0",
@@ -121,9 +120,10 @@ class Config:
},
"iteration": 2,
"cluster_id": 0,
- "started": arrow.utcnow().shift(minutes=-50).datetime,
- "stopped": arrow.utcnow().shift(minutes=-20).datetime,
- "submitted": arrow.utcnow().shift(hours=-1).datetime,
+ "started": arrow.utcnow().shift(minutes=-50).datetime, # type: ignore[dict-item]
+ "stopped": arrow.utcnow().shift(minutes=-20).datetime, # type: ignore[dict-item]
+ "submitted": arrow.utcnow().shift(hours=-1).datetime, # type: ignore[dict-item]
},
]
}
+ )
diff --git a/packages/models-library/src/models_library/projects_state.py b/packages/models-library/src/models_library/projects_state.py
index 38c68d5d4a4d..45a920a464cb 100644
--- a/packages/models-library/src/models_library/projects_state.py
+++ b/packages/models-library/src/models_library/projects_state.py
@@ -3,9 +3,8 @@
"""
from enum import Enum, unique
-from typing import Any, ClassVar
-from pydantic import BaseModel, Extra, Field, validator
+from pydantic import BaseModel, ConfigDict, Field, ValidationInfo, field_validator
from .projects_access import Owner
@@ -57,14 +56,15 @@ class ProjectStatus(str, Enum):
class ProjectLocked(BaseModel):
value: bool = Field(..., description="True if the project is locked")
owner: Owner | None = Field(
- default=None, description="If locked, the user that owns the lock"
+ default=None,
+ description="If locked, the user that owns the lock",
+ validate_default=True,
)
status: ProjectStatus = Field(..., description="The status of the project")
-
- class Config:
- extra = Extra.forbid
- use_enum_values = True
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ extra="forbid",
+ use_enum_values=True,
+ json_schema_extra={
"examples": [
{"value": False, "status": ProjectStatus.CLOSED},
{
@@ -77,24 +77,25 @@ class Config:
},
},
]
- }
+ },
+ )
- @validator("owner", pre=True, always=True)
+ @field_validator("owner", mode="before")
@classmethod
- def check_not_null(cls, v, values):
- if values["value"] is True and v is None:
+ def check_not_null(cls, v, info: ValidationInfo):
+ if info.data["value"] is True and v is None:
msg = "value cannot be None when project is locked"
raise ValueError(msg)
return v
- @validator("status", always=True)
+ @field_validator("status")
@classmethod
- def check_status_compatible(cls, v, values):
- if values["value"] is False and v not in ["CLOSED", "OPENED"]:
- msg = f"status is set to {v} and lock is set to {values['value']}!"
+ def check_status_compatible(cls, v, info: ValidationInfo):
+ if info.data["value"] is False and v not in ["CLOSED", "OPENED"]:
+ msg = f"status is set to {v} and lock is set to {info.data['value']}!"
raise ValueError(msg)
- if values["value"] is True and v == "CLOSED":
- msg = f"status is set to {v} and lock is set to {values['value']}!"
+ if info.data["value"] is True and v == "CLOSED":
+ msg = f"status is set to {v} and lock is set to {info.data['value']}!"
raise ValueError(msg)
return v
@@ -104,13 +105,11 @@ class ProjectRunningState(BaseModel):
..., description="The running state of the project", examples=["STARTED"]
)
- class Config:
- extra = Extra.forbid
+ model_config = ConfigDict(extra="forbid")
class ProjectState(BaseModel):
locked: ProjectLocked = Field(..., description="The project lock state")
state: ProjectRunningState = Field(..., description="The project running state")
- class Config:
- extra = Extra.forbid
+ model_config = ConfigDict(extra="forbid")
diff --git a/packages/models-library/src/models_library/projects_ui.py b/packages/models-library/src/models_library/projects_ui.py
index 154007a2a6d0..93aa68d628bf 100644
--- a/packages/models-library/src/models_library/projects_ui.py
+++ b/packages/models-library/src/models_library/projects_ui.py
@@ -2,10 +2,11 @@
Models Front-end UI
"""
-from typing import Any, ClassVar, Literal, TypedDict
+from typing import Literal
-from pydantic import BaseModel, Extra, Field, validator
-from pydantic.color import Color
+from pydantic import BaseModel, ConfigDict, Field, field_validator
+from pydantic_extra_types.color import Color
+from typing_extensions import TypedDict
from .projects_nodes_io import NodeID, NodeIDStr
from .projects_nodes_ui import Marker, Position
@@ -15,9 +16,7 @@
class WorkbenchUI(BaseModel):
position: Position = Field(..., description="The node position in the workbench")
marker: Marker | None = None
-
- class Config:
- extra = Extra.forbid
+ model_config = ConfigDict(extra="forbid")
class _SlideshowRequired(TypedDict):
@@ -32,10 +31,9 @@ class Annotation(BaseModel):
type: Literal["note", "rect", "text"] = Field(...)
color: Color = Field(...)
attributes: dict = Field(..., description="svg attributes")
-
- class Config:
- extra = Extra.forbid
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ extra="forbid",
+ json_schema_extra={
"examples": [
{
"type": "note",
@@ -60,7 +58,8 @@ class Config:
"attributes": {"x": 415, "y": 100, "text": "Hey!"},
},
]
- }
+ },
+ )
class StudyUI(BaseModel):
@@ -69,9 +68,8 @@ class StudyUI(BaseModel):
current_node_id: NodeID | None = Field(default=None, alias="currentNodeId")
annotations: dict[NodeIDStr, Annotation] | None = None
- class Config:
- extra = Extra.allow
+ model_config = ConfigDict(extra="allow")
- _empty_is_none = validator("*", allow_reuse=True, pre=True)(
+ _empty_is_none = field_validator("*", mode="before")(
empty_str_to_none_pre_validator
)
diff --git a/packages/models-library/src/models_library/rabbitmq_basic_types.py b/packages/models-library/src/models_library/rabbitmq_basic_types.py
index 022b66b9a9df..e8ae694b8be2 100644
--- a/packages/models-library/src/models_library/rabbitmq_basic_types.py
+++ b/packages/models-library/src/models_library/rabbitmq_basic_types.py
@@ -1,15 +1,15 @@
-import re
from typing import Final
-from pydantic import ConstrainedStr, parse_obj_as
+from models_library.basic_types import ConstrainedStr
+from pydantic import TypeAdapter
REGEX_RABBIT_QUEUE_ALLOWED_SYMBOLS: Final[str] = r"^[\w\-\.]*$"
class RPCNamespace(ConstrainedStr):
+ pattern = REGEX_RABBIT_QUEUE_ALLOWED_SYMBOLS
min_length: int = 1
max_length: int = 252
- regex: re.Pattern[str] | None = re.compile(REGEX_RABBIT_QUEUE_ALLOWED_SYMBOLS)
@classmethod
def from_entries(cls, entries: dict[str, str]) -> "RPCNamespace":
@@ -18,10 +18,10 @@ def from_entries(cls, entries: dict[str, str]) -> "RPCNamespace":
Keeping this to a predefined length
"""
composed_string = "-".join(f"{k}_{v}" for k, v in sorted(entries.items()))
- return parse_obj_as(cls, composed_string)
+ return TypeAdapter(cls).validate_python(composed_string)
class RPCMethodName(ConstrainedStr):
+ pattern = REGEX_RABBIT_QUEUE_ALLOWED_SYMBOLS
min_length: int = 1
max_length: int = 252
- regex: re.Pattern[str] | None = re.compile(REGEX_RABBIT_QUEUE_ALLOWED_SYMBOLS)
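
Note: RPCNamespace and RPCMethodName keep the repo's own ConstrainedStr shim, but the composed value is now checked through a TypeAdapter instead of parse_obj_as. Roughly the same flow with a plain Annotated type (an assumption-level sketch, not the shim's actual API):

    from typing import Annotated
    from pydantic import StringConstraints, TypeAdapter

    RPCName = Annotated[
        str, StringConstraints(pattern=r"^[\w\-\.]*$", min_length=1, max_length=252)
    ]

    def namespace_from_entries(entries: dict[str, str]) -> str:
        composed = "-".join(f"{k}_{v}" for k, v in sorted(entries.items()))
        return TypeAdapter(RPCName).validate_python(composed)

    print(namespace_from_entries({"service": "catalog", "method": "list"}))
    # -> method_list-service_catalog
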
diff --git a/packages/models-library/src/models_library/rabbitmq_messages.py b/packages/models-library/src/models_library/rabbitmq_messages.py
index 07dab962281c..13ecda316ae8 100644
--- a/packages/models-library/src/models_library/rabbitmq_messages.py
+++ b/packages/models-library/src/models_library/rabbitmq_messages.py
@@ -30,12 +30,12 @@ class RabbitEventMessageType(str, Enum):
class RabbitMessageBase(BaseModel):
- channel_name: str = Field(..., const=True)
+ channel_name: str
@classmethod
def get_channel_name(cls) -> str:
# NOTE: this returns the channel type name
- name: str = cls.__fields__["channel_name"].default
+ name: str = cls.model_fields["channel_name"].default
return name
@abstractmethod
@@ -132,9 +132,7 @@ def routing_key(self) -> str | None:
class _RabbitAutoscalingBaseMessage(RabbitMessageBase):
- channel_name: Literal["io.simcore.autoscaling"] = Field(
- default="io.simcore.autoscaling", const=True
- )
+ channel_name: Literal["io.simcore.autoscaling"] = "io.simcore.autoscaling"
origin: str = Field(
..., description="autoscaling app type, in case there would be more than one"
)
@@ -177,9 +175,7 @@ class RabbitResourceTrackingMessageType(StrAutoEnum):
class RabbitResourceTrackingBaseMessage(RabbitMessageBase):
- channel_name: Literal["io.simcore.service.tracking"] = Field(
- default="io.simcore.service.tracking", const=True
- )
+ channel_name: Literal["io.simcore.service.tracking"] = "io.simcore.service.tracking"
service_run_id: str = Field(
         ..., description="uniquely identifies the service run"
@@ -194,9 +190,9 @@ def routing_key(self) -> str | None:
class RabbitResourceTrackingStartedMessage(RabbitResourceTrackingBaseMessage):
- message_type: RabbitResourceTrackingMessageType = Field(
- default=RabbitResourceTrackingMessageType.TRACKING_STARTED, const=True
- )
+ message_type: Literal[
+ RabbitResourceTrackingMessageType.TRACKING_STARTED
+ ] = RabbitResourceTrackingMessageType.TRACKING_STARTED
wallet_id: WalletID | None
wallet_name: str | None
@@ -234,9 +230,9 @@ class RabbitResourceTrackingStartedMessage(RabbitResourceTrackingBaseMessage):
class RabbitResourceTrackingHeartbeatMessage(RabbitResourceTrackingBaseMessage):
- message_type: RabbitResourceTrackingMessageType = Field(
- default=RabbitResourceTrackingMessageType.TRACKING_HEARTBEAT, const=True
- )
+ message_type: Literal[
+ RabbitResourceTrackingMessageType.TRACKING_HEARTBEAT
+ ] = RabbitResourceTrackingMessageType.TRACKING_HEARTBEAT
class SimcorePlatformStatus(StrAutoEnum):
@@ -245,9 +241,9 @@ class SimcorePlatformStatus(StrAutoEnum):
class RabbitResourceTrackingStoppedMessage(RabbitResourceTrackingBaseMessage):
- message_type: RabbitResourceTrackingMessageType = Field(
- default=RabbitResourceTrackingMessageType.TRACKING_STOPPED, const=True
- )
+ message_type: Literal[
+ RabbitResourceTrackingMessageType.TRACKING_STOPPED
+ ] = RabbitResourceTrackingMessageType.TRACKING_STOPPED
simcore_platform_status: SimcorePlatformStatus = Field(
...,
@@ -263,9 +259,7 @@ class RabbitResourceTrackingStoppedMessage(RabbitResourceTrackingBaseMessage):
class WalletCreditsMessage(RabbitMessageBase):
- channel_name: Literal["io.simcore.service.wallets"] = Field(
- default="io.simcore.service.wallets", const=True
- )
+ channel_name: Literal["io.simcore.service.wallets"] = "io.simcore.service.wallets"
created_at: datetime.datetime = Field(
default_factory=lambda: arrow.utcnow().datetime,
description="message creation datetime",
@@ -283,9 +277,9 @@ class CreditsLimit(IntEnum):
class WalletCreditsLimitReachedMessage(RabbitMessageBase):
- channel_name: Literal["io.simcore.service.wallets-credit-limit-reached"] = Field(
- default="io.simcore.service.wallets-credit-limit-reached", const=True
- )
+ channel_name: Literal[
+ "io.simcore.service.wallets-credit-limit-reached"
+ ] = "io.simcore.service.wallets-credit-limit-reached"
created_at: datetime.datetime = Field(
default_factory=lambda: arrow.utcnow().datetime,
description="message creation datetime",
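
Note: Field(default=..., const=True) is gone in pydantic v2; the file above replaces it with a Literal annotation whose default is the only allowed value, which gives the same constant-field behaviour. A small sketch (illustrative message class):

    from typing import Literal
    from pydantic import BaseModel, ValidationError

    class AutoscalingMessage(BaseModel):
        channel_name: Literal["io.simcore.autoscaling"] = "io.simcore.autoscaling"

    print(AutoscalingMessage().channel_name)
    try:
        AutoscalingMessage(channel_name="other-channel")
    except ValidationError:
        print("rejected, just like const=True in v1")
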
diff --git a/packages/models-library/src/models_library/resource_tracker.py b/packages/models-library/src/models_library/resource_tracker.py
index 13c92e161ed1..c3b42a087957 100644
--- a/packages/models-library/src/models_library/resource_tracker.py
+++ b/packages/models-library/src/models_library/resource_tracker.py
@@ -2,16 +2,16 @@
from datetime import datetime, timezone
from decimal import Decimal
from enum import IntEnum, auto
-from typing import Any, ClassVar, NamedTuple, TypeAlias
+from typing import NamedTuple, TypeAlias
from pydantic import (
BaseModel,
ByteSize,
- Extra,
+ ConfigDict,
Field,
NonNegativeInt,
PositiveInt,
- validator,
+ field_validator,
)
from .products import ProductName
@@ -59,26 +59,28 @@ class PricingInfo(BaseModel):
pricing_unit_id: PricingUnitId
pricing_unit_cost_id: PricingUnitCostId
- class Config:
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"examples": [
{"pricing_plan_id": 1, "pricing_unit_id": 1, "pricing_unit_cost_id": 1}
]
}
+ )
class HardwareInfo(BaseModel):
aws_ec2_instances: list[str]
- class Config:
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"examples": [
{"aws_ec2_instances": ["c6a.4xlarge"]},
{"aws_ec2_instances": []},
]
}
+ )
- @validator("aws_ec2_instances")
+ @field_validator("aws_ec2_instances")
@classmethod
def warn_if_too_many_instances_are_present(cls, v: list[str]) -> list[str]:
if len(v) > 1:
@@ -106,10 +108,9 @@ class StartedAt(BaseModel):
from_: datetime | None = Field(None, alias="from")
until: datetime | None = Field(None)
- class Config:
- allow_population_by_field_name = True
+ model_config = ConfigDict(populate_by_name=True)
- @validator("from_", pre=True)
+ @field_validator("from_", mode="before")
@classmethod
def parse_from_filter(cls, v):
"""Parse the filters field."""
@@ -124,7 +125,7 @@ def parse_from_filter(cls, v):
return from_
return v
- @validator("until", pre=True)
+ @field_validator("until", mode="before")
@classmethod
def parse_until_filter(cls, v):
"""Parse the filters field."""
@@ -153,9 +154,8 @@ class PricingPlanCreate(BaseModel):
description: str
classification: PricingPlanClassification
pricing_plan_key: str
-
- class Config:
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"examples": [
{
"product_name": "osparc",
@@ -166,6 +166,7 @@ class Config:
}
]
}
+ )
class PricingPlanUpdate(BaseModel):
@@ -174,8 +175,8 @@ class PricingPlanUpdate(BaseModel):
description: str
is_active: bool
- class Config:
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"examples": [
{
"pricing_plan_id": 1,
@@ -185,6 +186,7 @@ class Config:
}
]
}
+ )
## Pricing Units
@@ -202,10 +204,10 @@ class UnitExtraInfo(BaseModel):
RAM: ByteSize
VRAM: ByteSize
- class Config:
- allow_population_by_field_name = True
- extra = Extra.allow
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ populate_by_name=True,
+ extra="allow",
+ json_schema_extra={
"examples": [
{
"CPU": 32,
@@ -215,7 +217,8 @@ class Config:
"custom key": "custom value",
}
]
- }
+ },
+ )
class PricingUnitWithCostCreate(BaseModel):
@@ -227,13 +230,13 @@ class PricingUnitWithCostCreate(BaseModel):
cost_per_unit: Decimal
comment: str
- class Config:
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"examples": [
{
"pricing_plan_id": 1,
"unit_name": "My pricing plan",
- "unit_extra_info": UnitExtraInfo.Config.schema_extra["examples"][0],
+ "unit_extra_info": UnitExtraInfo.model_config["json_schema_extra"]["examples"][0], # type: ignore [index]
"default": True,
"specific_info": {"aws_ec2_instances": ["t3.medium"]},
"cost_per_unit": 10,
@@ -241,6 +244,7 @@ class Config:
}
]
}
+ )
class PricingUnitCostUpdate(BaseModel):
@@ -257,14 +261,14 @@ class PricingUnitWithCostUpdate(BaseModel):
specific_info: SpecificInfo
pricing_unit_cost_update: None | PricingUnitCostUpdate
- class Config:
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"examples": [
{
"pricing_plan_id": 1,
"pricing_unit_id": 1,
"unit_name": "My pricing plan",
- "unit_extra_info": UnitExtraInfo.Config.schema_extra["examples"][0],
+ "unit_extra_info": UnitExtraInfo.model_config["json_schema_extra"]["examples"][0], # type: ignore [index]
"default": True,
"specific_info": {"aws_ec2_instances": ["t3.medium"]},
"pricing_unit_cost_update": {
@@ -276,13 +280,14 @@ class Config:
"pricing_plan_id": 1,
"pricing_unit_id": 1,
"unit_name": "My pricing plan",
- "unit_extra_info": UnitExtraInfo.Config.schema_extra["examples"][0],
+ "unit_extra_info": UnitExtraInfo.model_config["json_schema_extra"]["examples"][0], # type: ignore [index]
"default": True,
"specific_info": {"aws_ec2_instances": ["t3.medium"]},
"pricing_unit_cost_update": None,
},
]
}
+ )
class ServicesAggregatedUsagesType(StrAutoEnum):
diff --git a/packages/models-library/src/models_library/rest_ordering.py b/packages/models-library/src/models_library/rest_ordering.py
index c8a791343ee3..7b1b6b39c39e 100644
--- a/packages/models-library/src/models_library/rest_ordering.py
+++ b/packages/models-library/src/models_library/rest_ordering.py
@@ -1,6 +1,6 @@
from enum import Enum
-from pydantic import BaseModel, Field
+from pydantic import BaseModel, ConfigDict, Field
from .basic_types import IDStr
@@ -16,5 +16,4 @@ class OrderBy(BaseModel):
field: IDStr = Field()
direction: OrderDirection = Field(default=OrderDirection.ASC)
- class Config:
- extra = "forbid"
+ model_config = ConfigDict(extra="forbid")
diff --git a/packages/models-library/src/models_library/rest_pagination.py b/packages/models-library/src/models_library/rest_pagination.py
index 89c90cb1c2d3..d8e3b9990b35 100644
--- a/packages/models-library/src/models_library/rest_pagination.py
+++ b/packages/models-library/src/models_library/rest_pagination.py
@@ -1,17 +1,16 @@
-from typing import Any, ClassVar, Final, Generic, TypeVar
+from typing import Annotated, Final, Generic, TypeAlias, TypeVar
from pydantic import (
AnyHttpUrl,
BaseModel,
- ConstrainedInt,
- Extra,
+ ConfigDict,
Field,
NonNegativeInt,
PositiveInt,
- parse_obj_as,
- validator,
+ TypeAdapter,
+ ValidationInfo,
+ field_validator,
)
-from pydantic.generics import GenericModel
from .utils.common_validators import none_to_empty_list_pre_validator
@@ -21,19 +20,20 @@
MAXIMUM_NUMBER_OF_ITEMS_PER_PAGE: Final[int] = 50
-class PageLimitInt(ConstrainedInt):
- ge = 1
- lt = MAXIMUM_NUMBER_OF_ITEMS_PER_PAGE
+PageLimitInt: TypeAlias = Annotated[int, Field(ge=1, lt=MAXIMUM_NUMBER_OF_ITEMS_PER_PAGE)]
-
-DEFAULT_NUMBER_OF_ITEMS_PER_PAGE: Final[PageLimitInt] = parse_obj_as(PageLimitInt, 20)
+DEFAULT_NUMBER_OF_ITEMS_PER_PAGE: Final[PageLimitInt] = TypeAdapter(
+ PageLimitInt
+).validate_python(20)
class PageQueryParameters(BaseModel):
"""Use as pagination options in query parameters"""
limit: PageLimitInt = Field(
- default=parse_obj_as(PageLimitInt, DEFAULT_NUMBER_OF_ITEMS_PER_PAGE),
+ default=TypeAdapter(PageLimitInt).validate_python(
+ DEFAULT_NUMBER_OF_ITEMS_PER_PAGE
+ ),
description="maximum number of items to return (pagination)",
)
offset: NonNegativeInt = Field(
@@ -47,38 +47,36 @@ class PageMetaInfoLimitOffset(BaseModel):
offset: NonNegativeInt = 0
count: NonNegativeInt
- @validator("offset")
+ @field_validator("offset")
@classmethod
- def _check_offset(cls, v, values):
- if v > 0 and v >= values["total"]:
- msg = f"offset {v} cannot be equal or bigger than total {values['total']}, please check"
+ def _check_offset(cls, v, info: ValidationInfo):
+ if v > 0 and v >= info.data["total"]:
+ msg = f"offset {v} cannot be equal or bigger than total {info.data['total']}, please check"
raise ValueError(msg)
return v
- @validator("count")
+ @field_validator("count")
@classmethod
- def _check_count(cls, v, values):
- if v > values["limit"]:
- msg = f"count {v} bigger than limit {values['limit']}, please check"
+ def _check_count(cls, v, info: ValidationInfo):
+ if v > info.data["limit"]:
+ msg = f"count {v} bigger than limit {info.data['limit']}, please check"
raise ValueError(msg)
- if v > values["total"]:
- msg = (
- f"count {v} bigger than expected total {values['total']}, please check"
- )
+ if v > info.data["total"]:
+ msg = f"count {v} bigger than expected total {info.data['total']}, please check"
raise ValueError(msg)
- if "offset" in values and (values["offset"] + v) > values["total"]:
- msg = f"offset {values['offset']} + count {v} is bigger than allowed total {values['total']}, please check"
+ if "offset" in info.data and (info.data["offset"] + v) > info.data["total"]:
+ msg = f"offset {info.data['offset']} + count {v} is bigger than allowed total {info.data['total']}, please check"
raise ValueError(msg)
return v
- class Config:
- extra = Extra.forbid
-
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ extra="forbid",
+ json_schema_extra={
"examples": [
{"total": 7, "count": 4, "limit": 4, "offset": 0},
]
- }
+ },
+ )
RefT = TypeVar("RefT")
@@ -91,18 +89,17 @@ class PageRefs(BaseModel, Generic[RefT]):
next: RefT | None
last: RefT
- class Config:
- extra = Extra.forbid
+ model_config = ConfigDict(extra="forbid")
-class PageLinks(PageRefs[AnyHttpUrl]):
+class PageLinks(PageRefs[Annotated[str, AnyHttpUrl]]):
...
ItemT = TypeVar("ItemT")
-class Page(GenericModel, Generic[ItemT]):
+class Page(BaseModel, Generic[ItemT]):
"""
Paginated response model of ItemTs
"""
@@ -111,26 +108,25 @@ class Page(GenericModel, Generic[ItemT]):
links: PageLinks = Field(alias="_links")
data: list[ItemT]
- _none_is_empty = validator("data", allow_reuse=True, pre=True)(
+ _none_is_empty = field_validator("data", mode="before")(
none_to_empty_list_pre_validator
)
- @validator("data")
+ @field_validator("data")
@classmethod
- def _check_data_compatible_with_meta(cls, v, values):
- if "meta" not in values:
+ def _check_data_compatible_with_meta(cls, v, info: ValidationInfo):
+ if "meta" not in info.data:
# if the validation failed in meta this happens
msg = "meta not in values"
raise ValueError(msg)
- if len(v) != values["meta"].count:
- msg = f"container size [{len(v)}] must be equal to count [{values['meta'].count}]"
+ if len(v) != info.data["meta"].count:
+ msg = f"container size [{len(v)}] must be equal to count [{info.data['meta'].count}]"
raise ValueError(msg)
return v
- class Config:
- extra = Extra.forbid
-
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ extra="forbid",
+ json_schema_extra={
"examples": [
# first page Page[str]
{
@@ -157,4 +153,5 @@ class Config:
"data": ["data 5", "data 6", "data 7"],
},
]
- }
+ },
+ )
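
Note: pydantic.generics.GenericModel no longer exists in v2; a generic model now subclasses BaseModel and typing.Generic directly, as Page does above. A minimal sketch (toy container, not the repo's Page):

    from typing import Generic, TypeVar
    from pydantic import BaseModel

    ItemT = TypeVar("ItemT")

    class Envelope(BaseModel, Generic[ItemT]):
        data: list[ItemT]

    print(Envelope[int](data=[1, "2"]).data)  # -> [1, 2] ("2" coerced by the int parametrization)
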
diff --git a/packages/models-library/src/models_library/rest_pagination_utils.py b/packages/models-library/src/models_library/rest_pagination_utils.py
index ec9cedf0a544..8d901d50f307 100644
--- a/packages/models-library/src/models_library/rest_pagination_utils.py
+++ b/packages/models-library/src/models_library/rest_pagination_utils.py
@@ -1,8 +1,9 @@
from math import ceil
from typing import Any, Protocol, TypedDict, Union, runtime_checkable
-from pydantic import AnyHttpUrl, parse_obj_as
+from pydantic import parse_obj_as
+from .basic_types import AnyHttpUrl
from .rest_pagination import PageLinks, PageMetaInfoLimitOffset
# NOTE: In this repo we use two type of URL-like data structures:
diff --git a/packages/models-library/src/models_library/rpc_pagination.py b/packages/models-library/src/models_library/rpc_pagination.py
index 34eeb9979906..0ec454cc9fd2 100644
--- a/packages/models-library/src/models_library/rpc_pagination.py
+++ b/packages/models-library/src/models_library/rpc_pagination.py
@@ -1,8 +1,8 @@
# mypy: disable-error-code=truthy-function
from math import ceil
-from typing import Any, ClassVar, Generic
+from typing import Any, Generic
-from pydantic import Extra, Field
+from pydantic import ConfigDict, Field
from .rest_pagination import (
DEFAULT_NUMBER_OF_ITEMS_PER_PAGE,
@@ -74,10 +74,9 @@ def create(
data=chunk,
)
- class Config:
- extra = Extra.forbid
-
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ extra="forbid",
+ json_schema_extra={
"examples": [
# first page Page[str]
{
@@ -104,4 +103,5 @@ class Config:
"data": ["data 5", "data 6", "data 7"],
},
]
- }
+ },
+ )
diff --git a/packages/models-library/src/models_library/service_settings_labels.py b/packages/models-library/src/models_library/service_settings_labels.py
index 95bc13a0b094..8f954737231d 100644
--- a/packages/models-library/src/models_library/service_settings_labels.py
+++ b/packages/models-library/src/models_library/service_settings_labels.py
@@ -3,19 +3,20 @@
from enum import Enum
from functools import cached_property
from pathlib import Path
-from typing import Any, ClassVar, Literal, TypeAlias
+from typing import Any, Literal, TypeAlias
from pydantic import (
BaseModel,
ByteSize,
- Extra,
+ ConfigDict,
Field,
Json,
PrivateAttr,
ValidationError,
+ ValidationInfo,
+ field_validator,
+ model_validator,
parse_obj_as,
- root_validator,
- validator,
)
from .callbacks_mapping import CallbacksMapping
@@ -24,11 +25,9 @@
from .services_resources import DEFAULT_SINGLE_SERVICE_NAME
from .utils.json_serialization import json_dumps
-
-class _BaseConfig:
- arbitrary_types_allowed = True
- extra = Extra.forbid
- keep_untouched = (cached_property,)
+_BaseConfig = ConfigDict(
+ extra="forbid", arbitrary_types_allowed=True, ignored_types=(cached_property,)
+)
class ContainerSpec(BaseModel):
@@ -40,18 +39,20 @@ class ContainerSpec(BaseModel):
alias="Command",
description="Used to override the container's command",
# NOTE: currently constraint to our use cases. Might mitigate some security issues.
- min_items=1,
- max_items=2,
+ min_length=1,
+ max_length=2,
)
- class Config(_BaseConfig):
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ **_BaseConfig,
+ json_schema_extra={
"examples": [
{"Command": ["executable"]},
{"Command": ["executable", "subcommand"]},
{"Command": ["ofs", "linear-regression"]},
]
- }
+ },
+ )
class SimcoreServiceSettingLabelEntry(BaseModel):
@@ -93,7 +94,7 @@ def get_destination_containers(self) -> list[str]:
# as fields
return self._destination_containers
- @validator("setting_type", pre=True)
+ @field_validator("setting_type", mode="before")
@classmethod
def ensure_backwards_compatible_setting_type(cls, v):
if v == "resources":
@@ -101,9 +102,10 @@ def ensure_backwards_compatible_setting_type(cls, v):
return "Resources"
return v
- class Config(_BaseConfig):
- allow_population_by_field_name = True
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ **_BaseConfig,
+ populate_by_name=True,
+ json_schema_extra={
"examples": [
# constraints
{
@@ -157,7 +159,8 @@ class Config(_BaseConfig):
},
},
]
- }
+ },
+ )
SimcoreServiceSettingsLabel = ListModel[SimcoreServiceSettingLabelEntry]
@@ -191,9 +194,9 @@ class PathMappingsLabel(BaseModel):
),
)
- @validator("volume_size_limits")
+ @field_validator("volume_size_limits")
@classmethod
- def validate_volume_limits(cls, v, values) -> str | None:
+ def validate_volume_limits(cls, v, info: ValidationInfo) -> str | None:
if v is None:
return v
@@ -205,9 +208,9 @@ def validate_volume_limits(cls, v, values) -> str | None:
msg = f"Provided size='{size_str}' contains invalid charactes: {e!s}"
raise ValueError(msg) from e
- inputs_path: Path | None = values.get("inputs_path")
- outputs_path: Path | None = values.get("outputs_path")
- state_paths: list[Path] | None = values.get("state_paths")
+ inputs_path: Path | None = info.data.get("inputs_path")
+ outputs_path: Path | None = info.data.get("outputs_path")
+ state_paths: list[Path] | None = info.data.get("state_paths")
path = Path(path_str)
if not (
path in (inputs_path, outputs_path)
@@ -218,8 +221,9 @@ def validate_volume_limits(cls, v, values) -> str | None:
output: str | None = v
return output
- class Config(_BaseConfig):
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ **_BaseConfig,
+ json_schema_extra={
"examples": [
{
"outputs_path": "/tmp/outputs", # noqa: S108 nosec
@@ -249,7 +253,8 @@ class Config(_BaseConfig):
},
},
]
- }
+ },
+ )
ComposeSpecLabelDict: TypeAlias = dict[str, Any]
@@ -292,6 +297,7 @@ class DynamicSidecarServiceLabels(BaseModel):
"specified. Required by dynamic-sidecar when "
"compose_spec is set."
),
+ validate_default=True,
)
user_preferences_path: Path | None = Field(
@@ -339,25 +345,29 @@ def needs_dynamic_sidecar(self) -> bool:
"""if paths mapping is present the service needs to be ran via dynamic-sidecar"""
return self.paths_mapping is not None
- @validator("container_http_entry", always=True)
+ @field_validator("container_http_entry")
@classmethod
- def compose_spec_requires_container_http_entry(cls, v, values) -> str | None:
+ def compose_spec_requires_container_http_entry(
+ cls, v, info: ValidationInfo
+ ) -> str | None:
v = None if v == "" else v
- if v is None and values.get("compose_spec") is not None:
+ if v is None and info.data.get("compose_spec") is not None:
msg = "Field `container_http_entry` must be defined but is missing"
raise ValueError(msg)
- if v is not None and values.get("compose_spec") is None:
+ if v is not None and info.data.get("compose_spec") is None:
msg = "`container_http_entry` not allowed if `compose_spec` is missing"
raise ValueError(msg)
return f"{v}" if v else v
- @validator("containers_allowed_outgoing_permit_list")
+ @field_validator("containers_allowed_outgoing_permit_list")
@classmethod
- def _containers_allowed_outgoing_permit_list_in_compose_spec(cls, v, values):
+ def _containers_allowed_outgoing_permit_list_in_compose_spec(
+ cls, v, info: ValidationInfo
+ ):
if v is None:
return v
- compose_spec: dict | None = values.get("compose_spec")
+ compose_spec: dict | None = info.data.get("compose_spec")
if compose_spec is None:
keys = set(v.keys())
if len(keys) != 1 or DEFAULT_SINGLE_SERVICE_NAME not in keys:
@@ -372,13 +382,15 @@ def _containers_allowed_outgoing_permit_list_in_compose_spec(cls, v, values):
return v
- @validator("containers_allowed_outgoing_internet")
+ @field_validator("containers_allowed_outgoing_internet")
@classmethod
- def _containers_allowed_outgoing_internet_in_compose_spec(cls, v, values):
+ def _containers_allowed_outgoing_internet_in_compose_spec(
+ cls, v, info: ValidationInfo
+ ):
if v is None:
- return v
+ return None
- compose_spec: dict | None = values.get("compose_spec")
+ compose_spec: dict | None = info.data.get("compose_spec")
if compose_spec is None:
if {DEFAULT_SINGLE_SERVICE_NAME} != v:
err_msg = (
@@ -393,10 +405,10 @@ def _containers_allowed_outgoing_internet_in_compose_spec(cls, v, values):
raise ValueError(err_msg)
return v
- @validator("callbacks_mapping")
+ @field_validator("callbacks_mapping")
@classmethod
def _ensure_callbacks_mapping_container_names_defined_in_compose_spec(
- cls, v: CallbacksMapping, values
+ cls, v: CallbacksMapping, info: ValidationInfo
):
if v is None:
return {}
@@ -408,7 +420,7 @@ def _ensure_callbacks_mapping_container_names_defined_in_compose_spec(
if len(defined_services) == 0:
return v
- compose_spec: dict | None = values.get("compose_spec")
+ compose_spec: dict | None = info.data.get("compose_spec")
if compose_spec is None:
if {DEFAULT_SINGLE_SERVICE_NAME} != defined_services:
err_msg = f"Expected only 1 entry '{DEFAULT_SINGLE_SERVICE_NAME}' not '{defined_services}'"
@@ -421,17 +433,17 @@ def _ensure_callbacks_mapping_container_names_defined_in_compose_spec(
raise ValueError(err_msg)
return v
- @validator("user_preferences_path", pre=True)
+ @field_validator("user_preferences_path", mode="before")
@classmethod
def _deserialize_from_json(cls, v):
return f"{v}".removeprefix('"').removesuffix('"')
- @validator("user_preferences_path")
+ @field_validator("user_preferences_path")
@classmethod
def _user_preferences_path_no_included_in_other_volumes(
- cls, v: CallbacksMapping, values
+ cls, v: CallbacksMapping, info: ValidationInfo
):
- paths_mapping: PathMappingsLabel | None = values.get("paths_mapping", None)
+ paths_mapping: PathMappingsLabel | None = info.data.get("paths_mapping", None)
if paths_mapping is None:
return v
@@ -445,33 +457,24 @@ def _user_preferences_path_no_included_in_other_volumes(
raise ValueError(msg)
return v
- @root_validator
- @classmethod
- def _not_allowed_in_both_specs(cls, values):
+ @model_validator(mode="after")
+ def _not_allowed_in_both_specs(self):
match_keys = {
"containers_allowed_outgoing_internet",
"containers_allowed_outgoing_permit_list",
}
- if match_keys & set(values.keys()) != match_keys:
- err_msg = (
- f"Expected the following keys {match_keys} to be present {values=}"
- )
+ if match_keys & set(self.model_fields) != match_keys:
+ err_msg = f"Expected the following keys {match_keys} to be present {self.model_fields=}"
raise ValueError(err_msg)
- containers_allowed_outgoing_internet = values[
- "containers_allowed_outgoing_internet"
- ]
- containers_allowed_outgoing_permit_list = values[
- "containers_allowed_outgoing_permit_list"
- ]
if (
- containers_allowed_outgoing_internet is None
- or containers_allowed_outgoing_permit_list is None
+ self.containers_allowed_outgoing_internet is None
+ or self.containers_allowed_outgoing_permit_list is None
):
- return values
+ return self
- common_containers = set(containers_allowed_outgoing_internet) & set(
- containers_allowed_outgoing_permit_list.keys()
+ common_containers = set(self.containers_allowed_outgoing_internet) & set(
+ self.containers_allowed_outgoing_permit_list.keys()
)
if len(common_containers) > 0:
err_msg = (
@@ -481,10 +484,9 @@ def _not_allowed_in_both_specs(cls, values):
)
raise ValueError(err_msg)
- return values
+ return self
- class Config(_BaseConfig):
- ...
+ model_config = _BaseConfig
class SimcoreServiceLabels(DynamicSidecarServiceLabels):
@@ -513,24 +515,32 @@ class SimcoreServiceLabels(DynamicSidecarServiceLabels):
),
)
- class Config(_BaseConfig):
- extra = Extra.allow
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = _BaseConfig | ConfigDict(
+ extra="allow",
+ json_schema_extra={
"examples": [
# WARNING: do not change order. Used in tests!
# legacy service
{
"simcore.service.settings": json_dumps(
- SimcoreServiceSettingLabelEntry.Config.schema_extra["examples"]
+ SimcoreServiceSettingLabelEntry.model_config[
+ "json_schema_extra"
+ ][
+ "examples"
+ ] # type: ignore[index]
)
},
# dynamic-service
{
"simcore.service.settings": json_dumps(
- SimcoreServiceSettingLabelEntry.Config.schema_extra["examples"]
+ SimcoreServiceSettingLabelEntry.model_config[
+ "json_schema_extra"
+ ][
+ "examples"
+ ] # type: ignore[index]
),
"simcore.service.paths-mapping": json_dumps(
- PathMappingsLabel.Config.schema_extra["examples"][0]
+ PathMappingsLabel.model_config["json_schema_extra"]["examples"][0] # type: ignore [index]
),
"simcore.service.restart-policy": RestartPolicy.NO_RESTART.value,
"simcore.service.callbacks-mapping": json_dumps(
@@ -549,10 +559,14 @@ class Config(_BaseConfig):
# dynamic-service with compose spec
{
"simcore.service.settings": json_dumps(
- SimcoreServiceSettingLabelEntry.Config.schema_extra["examples"]
+ SimcoreServiceSettingLabelEntry.model_config[
+ "json_schema_extra"
+ ][
+ "examples"
+ ] # type: ignore[index]
),
"simcore.service.paths-mapping": json_dumps(
- PathMappingsLabel.Config.schema_extra["examples"][0]
+ PathMappingsLabel.model_config["json_schema_extra"]["examples"][0], # type: ignore[index]
),
"simcore.service.compose-spec": json_dumps(
{
@@ -580,8 +594,9 @@ class Config(_BaseConfig):
"simcore.service.container-http-entrypoint": "rt-web",
"simcore.service.restart-policy": RestartPolicy.ON_INPUTS_DOWNLOADED.value,
"simcore.service.callbacks-mapping": json_dumps(
- CallbacksMapping.Config.schema_extra["examples"][3]
+ CallbacksMapping.model_config["json_schema_extra"]["examples"][3] # type: ignore [index]
),
},
]
- }
+ },
+ )
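
Note: because ConfigDict is a TypedDict, the shared _BaseConfig options above can be reused directly, unpacked with **, or merged with the dict union operator as in SimcoreServiceLabels; cross-field checks move from root_validator to model_validator(mode="after"), which receives the built instance. A compact sketch (illustrative model and field names):

    from functools import cached_property
    from pydantic import BaseModel, ConfigDict, model_validator

    _SHARED = ConfigDict(extra="forbid", ignored_types=(cached_property,))

    class OutgoingRules(BaseModel):
        allow_all: list[str] | None = None
        allow_some: dict[str, list[int]] | None = None

        # merge shared options with model-specific ones
        model_config = _SHARED | ConfigDict(validate_assignment=True)

        @model_validator(mode="after")
        def _not_in_both(self):
            if self.allow_all and self.allow_some:
                overlap = set(self.allow_all) & set(self.allow_some)
                if overlap:
                    msg = f"containers {overlap} listed in both fields"
                    raise ValueError(msg)
            return self
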
diff --git a/packages/models-library/src/models_library/service_settings_nat_rule.py b/packages/models-library/src/models_library/service_settings_nat_rule.py
index bcdf0604eec6..ee937c81254f 100644
--- a/packages/models-library/src/models_library/service_settings_nat_rule.py
+++ b/packages/models-library/src/models_library/service_settings_nat_rule.py
@@ -1,7 +1,14 @@
from collections.abc import Generator
-from typing import Any, ClassVar, Final
+from typing import Final
-from pydantic import BaseModel, Extra, Field, parse_obj_as, validator
+from pydantic import (
+ BaseModel,
+ ConfigDict,
+ Field,
+ ValidationInfo,
+ field_validator,
+ parse_obj_as,
+)
from .basic_types import PortInt
from .osparc_variable_identifier import OsparcVariableIdentifier, raise_if_unresolved
@@ -17,14 +24,14 @@ class _PortRange(BaseModel):
lower: PortInt | OsparcVariableIdentifier
upper: PortInt | OsparcVariableIdentifier
- @validator("upper")
+ @field_validator("upper")
@classmethod
- def lower_less_than_upper(cls, v, values) -> PortInt:
+ def lower_less_than_upper(cls, v, info: ValidationInfo) -> PortInt:
if isinstance(v, OsparcVariableIdentifier):
return v # type: ignore # bypass validation if unresolved
upper = v
- lower: PortInt | OsparcVariableIdentifier | None = values.get("lower")
+ lower: PortInt | OsparcVariableIdentifier | None = info.data.get("lower")
if lower and isinstance(lower, OsparcVariableIdentifier):
return v # type: ignore # bypass validation if unresolved
@@ -34,9 +41,7 @@ def lower_less_than_upper(cls, v, values) -> PortInt:
raise ValueError(msg)
return PortInt(v)
- class Config:
- arbitrary_types_allowed = True
- validate_assignment = True
+ model_config = ConfigDict(arbitrary_types_allowed=True, validate_assignment=True)
class DNSResolver(BaseModel):
@@ -45,16 +50,17 @@ class DNSResolver(BaseModel):
)
port: PortInt | OsparcVariableIdentifier
- class Config:
- arbitrary_types_allowed = True
- validate_assignment = True
- extra = Extra.allow
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ arbitrary_types_allowed=True,
+ validate_assignment=True,
+ extra="allow",
+ json_schema_extra={
"examples": [
{"address": "1.1.1.1", "port": 53}, # NOSONAR
{"address": "ns1.example.com", "port": 53},
]
- }
+ },
+ )
class NATRule(BaseModel):
@@ -69,6 +75,8 @@ class NATRule(BaseModel):
description="specify a DNS resolver address and port",
)
+ model_config = ConfigDict(arbitrary_types_allowed=True, validate_assignment=True)
+
def iter_tcp_ports(self) -> Generator[PortInt, None, None]:
for port in self.tcp_ports:
if isinstance(port, _PortRange):
@@ -81,7 +89,3 @@ def iter_tcp_ports(self) -> Generator[PortInt, None, None]:
)
else:
yield raise_if_unresolved(port)
-
- class Config:
- arbitrary_types_allowed = True
- validate_assignment = True
diff --git a/packages/models-library/src/models_library/services_access.py b/packages/models-library/src/models_library/services_access.py
index 8bc6786c6955..84dbd7d17a0e 100644
--- a/packages/models-library/src/models_library/services_access.py
+++ b/packages/models-library/src/models_library/services_access.py
@@ -2,7 +2,7 @@
"""
-from pydantic import BaseModel, Extra, Field
+from pydantic import BaseModel, ConfigDict, Field
from .users import GroupID
from .utils.change_case import snake_to_camel
@@ -22,10 +22,9 @@ class ServiceGroupAccessRightsV2(BaseModel):
execute: bool = False
write: bool = False
- class Config:
- alias_generator = snake_to_camel
- allow_population_by_field_name = True
- extra = Extra.forbid
+ model_config = ConfigDict(
+ alias_generator=snake_to_camel, populate_by_name=True, extra="forbid"
+ )
class ServiceAccessRights(BaseModel):
diff --git a/packages/models-library/src/models_library/services_authoring.py b/packages/models-library/src/models_library/services_authoring.py
index 18673319f46b..05b5197994c8 100644
--- a/packages/models-library/src/models_library/services_authoring.py
+++ b/packages/models-library/src/models_library/services_authoring.py
@@ -1,6 +1,4 @@
-from typing import Any, ClassVar
-
-from pydantic import BaseModel, Field, HttpUrl
+from pydantic import BaseModel, ConfigDict, Field, HttpUrl
from .emails import LowerCaseEmailStr
@@ -18,15 +16,15 @@ class Badge(BaseModel):
...,
description="Link to the status",
)
-
- class Config:
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"example": {
"name": "osparc.io",
"image": "https://img.shields.io/website-up-down-green-red/https/itisfoundation.github.io.svg?label=documentation",
"url": "https://itisfoundation.github.io/",
}
}
+ )
class Author(BaseModel):
@@ -39,9 +37,8 @@ class Author(BaseModel):
description="Email address",
)
affiliation: str | None = Field(None)
-
- class Config:
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"examples": [
{
"name": "Jim Knopf",
@@ -54,3 +51,4 @@ class Config:
},
]
}
+ )
diff --git a/packages/models-library/src/models_library/services_base.py b/packages/models-library/src/models_library/services_base.py
index d80fc59df24c..5f92d6e46b6b 100644
--- a/packages/models-library/src/models_library/services_base.py
+++ b/packages/models-library/src/models_library/services_base.py
@@ -1,4 +1,5 @@
-from pydantic import BaseModel, Field, HttpUrl, validator
+from typing import Annotated
+from pydantic import BaseModel, ConfigDict, Field, HttpUrl, field_validator
from .services_types import ServiceKey, ServiceVersion
from .utils.common_validators import empty_str_to_none_pre_validator
@@ -16,22 +17,22 @@ class ServiceKeyVersion(BaseModel):
description="service version number",
)
- class Config:
- frozen = True
+ model_config = ConfigDict(frozen=True)
class ServiceBaseDisplay(BaseModel):
name: str = Field(
...,
description="Display name: short, human readable name for the node",
- example="Fast Counter",
+ examples=["Fast Counter"],
)
- thumbnail: HttpUrl | None = Field(
+ thumbnail: Annotated[str, HttpUrl] | None = Field(
None,
description="url to the thumbnail",
examples=[
"https://user-images.githubusercontent.com/32800795/61083844-ff48fb00-a42c-11e9-8e63-fa2d709c8baf.png"
],
+ validate_default=True,
)
description: str = Field(
...,
@@ -53,6 +54,6 @@ class ServiceBaseDisplay(BaseModel):
" This name is not used for version comparison but is useful for communication and documentation purposes.",
)
- _empty_is_none = validator("thumbnail", allow_reuse=True, pre=True, always=False)(
+ _empty_is_none = field_validator("thumbnail", mode="before")(
empty_str_to_none_pre_validator
)
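Illustrative note (not part of the patch): reusable validators move from `validator(..., pre=True, allow_reuse=True)` to `field_validator(..., mode="before")`. A minimal sketch under assumed names (`Card` and the inline `empty_str_to_none` only approximate `empty_str_to_none_pre_validator`):

```python
from pydantic import BaseModel, field_validator


def empty_str_to_none(value):
    # map empty/whitespace-only strings to None before type validation
    if isinstance(value, str) and value.strip() == "":
        return None
    return value


class Card(BaseModel):  # hypothetical model
    thumbnail: str | None = None

    # v1: validator("thumbnail", pre=True, allow_reuse=True)(...)
    _empty_is_none = field_validator("thumbnail", mode="before")(empty_str_to_none)


assert Card(thumbnail="").thumbnail is None
```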
diff --git a/packages/models-library/src/models_library/services_creation.py b/packages/models-library/src/models_library/services_creation.py
index e2102efe0750..c1c2c5172fcd 100644
--- a/packages/models-library/src/models_library/services_creation.py
+++ b/packages/models-library/src/models_library/services_creation.py
@@ -1,6 +1,6 @@
-from typing import Any, ClassVar
+from typing import Any
-from pydantic import BaseModel
+from pydantic import BaseModel, ConfigDict
from .services import ServiceKey, ServiceVersion
from .services_resources import ServiceResourcesDict
@@ -23,8 +23,8 @@ class CreateServiceMetricsAdditionalParams(BaseModel):
service_resources: ServiceResourcesDict
service_additional_metadata: dict[str, Any]
- class Config:
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"example": {
"wallet_id": 1,
"wallet_name": "a private wallet for me",
@@ -42,3 +42,4 @@ class Config:
"service_additional_metadata": {},
}
}
+ )
diff --git a/packages/models-library/src/models_library/services_history.py b/packages/models-library/src/models_library/services_history.py
index 70f4e513c15b..b38f5f2e783e 100644
--- a/packages/models-library/src/models_library/services_history.py
+++ b/packages/models-library/src/models_library/services_history.py
@@ -1,7 +1,7 @@
from datetime import datetime
-from typing import Any, ClassVar, TypeAlias
+from typing import TypeAlias
-from pydantic import BaseModel, Field
+from pydantic import BaseModel, ConfigDict, Field
from .services_types import ServiceKey, ServiceVersion
from .utils.change_case import snake_to_camel
@@ -21,9 +21,7 @@ class Compatibility(BaseModel):
..., description="Latest compatible service at this moment"
)
- class Config:
- alias_generator = snake_to_camel
- allow_population_by_field_name = True
+ model_config = ConfigDict(alias_generator=snake_to_camel, populate_by_name=True)
class ServiceRelease(BaseModel):
@@ -46,10 +44,10 @@ class ServiceRelease(BaseModel):
default=None, description="Compatibility with other releases at this moment"
)
- class Config:
- alias_generator = snake_to_camel
- allow_population_by_field_name = True
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ alias_generator=snake_to_camel,
+ populate_by_name=True,
+ json_schema_extra={
"examples": [
# minimal
{
@@ -69,7 +67,8 @@ class Config:
},
},
]
- }
+ },
+ )
ReleaseHistory: TypeAlias = list[ServiceRelease]
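Illustrative note (not part of the patch): `allow_population_by_field_name` becomes `populate_by_name` in v2; `alias_generator` keeps its meaning. A sketch with a hypothetical model and a local stand-in for `snake_to_camel`:

```python
from pydantic import BaseModel, ConfigDict


def snake_to_camel(name: str) -> str:  # simplified stand-in for utils.change_case.snake_to_camel
    first, *rest = name.split("_")
    return first + "".join(part.capitalize() for part in rest)


class Compat(BaseModel):  # hypothetical model
    can_update_to: str

    model_config = ConfigDict(alias_generator=snake_to_camel, populate_by_name=True)


a = Compat(canUpdateTo="2.0")    # via the generated camelCase alias
b = Compat(can_update_to="2.0")  # via the field name, allowed by populate_by_name
assert a.can_update_to == b.can_update_to == "2.0"
```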
diff --git a/packages/models-library/src/models_library/services_io.py b/packages/models-library/src/models_library/services_io.py
index 52c09fa241d7..49264f197999 100644
--- a/packages/models-library/src/models_library/services_io.py
+++ b/packages/models-library/src/models_library/services_io.py
@@ -1,15 +1,15 @@
-import re
-from typing import Any, ClassVar
+from typing import Annotated, Any, TypeAlias
from pydantic import (
BaseModel,
- ConstrainedStr,
- Extra,
+ ConfigDict,
Field,
StrictBool,
StrictFloat,
StrictInt,
- validator,
+ StringConstraints,
+ ValidationInfo,
+ field_validator,
)
from .services_constants import ANY_FILETYPE
@@ -22,12 +22,7 @@
jsonschema_validate_schema,
)
-
-class PropertyTypeStr(ConstrainedStr):
- regex = re.compile(PROPERTY_TYPE_RE)
-
- class Config:
- frozen = True
+PropertyTypeStr: TypeAlias = Annotated[str, StringConstraints(pattern=PROPERTY_TYPE_RE)]
class BaseServiceIOModel(BaseModel):
@@ -45,11 +40,11 @@ class BaseServiceIOModel(BaseModel):
description="DEPRECATED: new display order is taken from the item position. This will be removed.",
)
- label: str = Field(..., description="short name for the property", example="Age")
+ label: str = Field(..., description="short name for the property", examples=["Age"])
description: str = Field(
...,
description="description of the property",
- example="Age in seconds since 1970",
+ examples=["Age in seconds since 1970"],
)
# mathematical and physics descriptors
@@ -92,18 +87,20 @@ class BaseServiceIOModel(BaseModel):
deprecated=True, # add x_unit in content_schema instead
)
- class Config:
- extra = Extra.forbid
+ model_config = ConfigDict(extra="forbid")
- @validator("content_schema")
+ @field_validator("content_schema")
@classmethod
- def _check_type_is_set_to_schema(cls, v, values):
- if v is not None and (ptype := values["property_type"]) != "ref_contentSchema":
+ def _check_type_is_set_to_schema(cls, v, info: ValidationInfo):
+ if (
+ v is not None
+ and (ptype := info.data["property_type"]) != "ref_contentSchema"
+ ):
msg = f"content_schema is defined but set the wrong type. Expected type=ref_contentSchema but got ={ptype}."
raise ValueError(msg)
return v
- @validator("content_schema")
+ @field_validator("content_schema")
@classmethod
def _check_valid_json_schema(cls, v):
if v is not None:
@@ -151,8 +148,9 @@ class ServiceInput(BaseServiceIOModel):
description="custom widget to use instead of the default one determined from the data-type",
)
- class Config(BaseServiceIOModel.Config):
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ **BaseServiceIOModel.model_config,
+ json_schema_extra={
"examples": [
# file-wo-widget:
{
@@ -206,7 +204,8 @@ class Config(BaseServiceIOModel.Config):
},
},
],
- }
+ },
+ )
@classmethod
def from_json_schema(cls, port_schema: dict[str, Any]) -> "ServiceInput":
@@ -222,8 +221,9 @@ class ServiceOutput(BaseServiceIOModel):
deprecated=True,
)
- class Config(BaseServiceIOModel.Config):
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ **BaseServiceIOModel.model_config,
+ json_schema_extra={
"examples": [
{
"displayOrder": 2,
@@ -251,7 +251,8 @@ class Config(BaseServiceIOModel.Config):
"type": ANY_FILETYPE,
},
]
- }
+ },
+ )
@classmethod
def from_json_schema(cls, port_schema: dict[str, Any]) -> "ServiceOutput":
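Illustrative note (not part of the patch): in v2 a field validator no longer receives a `values` dict; previously validated fields are read from `ValidationInfo.data`, as in the `_check_type_is_set_to_schema` hunk above. A trimmed, hypothetical sketch:

```python
from typing import Any

from pydantic import BaseModel, ValidationInfo, field_validator


class Port(BaseModel):  # hypothetical stand-in for BaseServiceIOModel
    property_type: str
    content_schema: dict[str, Any] | None = None

    @field_validator("content_schema")
    @classmethod
    def _check_type_is_set_to_schema(cls, v, info: ValidationInfo):
        # fields declared before this one are available in info.data
        if v is not None and info.data["property_type"] != "ref_contentSchema":
            msg = "content_schema requires property_type=ref_contentSchema"
            raise ValueError(msg)
        return v
```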
diff --git a/packages/models-library/src/models_library/services_metadata_editable.py b/packages/models-library/src/models_library/services_metadata_editable.py
index 18d66483f1c3..4ad106225c09 100644
--- a/packages/models-library/src/models_library/services_metadata_editable.py
+++ b/packages/models-library/src/models_library/services_metadata_editable.py
@@ -1,8 +1,8 @@
# mypy: disable-error-code=truthy-function
from datetime import datetime
-from typing import Any, ClassVar
+from typing import Annotated, Any
-from pydantic import Field, HttpUrl
+from pydantic import ConfigDict, Field, HttpUrl
from .services_base import ServiceBaseDisplay
from .services_constants import LATEST_INTEGRATION_VERSION
@@ -19,7 +19,7 @@
class ServiceMetaDataEditable(ServiceBaseDisplay):
# Overrides ServiceBaseDisplay fields to Optional for a partial update
name: str | None # type: ignore[assignment]
- thumbnail: HttpUrl | None
+ thumbnail: Annotated[str, HttpUrl] | None
description: str | None # type: ignore[assignment]
description_ui: bool = False
version_display: str | None = None
@@ -35,8 +35,8 @@ class ServiceMetaDataEditable(ServiceBaseDisplay):
classifiers: list[str] | None
quality: dict[str, Any] = {}
- class Config:
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"example": {
"key": "simcore/services/dynamic/sim4life",
"version": "1.0.9",
@@ -62,3 +62,4 @@ class Config:
},
}
}
+ )
diff --git a/packages/models-library/src/models_library/services_metadata_published.py b/packages/models-library/src/models_library/services_metadata_published.py
index b50d838d9d11..51fba05b7f42 100644
--- a/packages/models-library/src/models_library/services_metadata_published.py
+++ b/packages/models-library/src/models_library/services_metadata_published.py
@@ -1,7 +1,7 @@
from datetime import datetime
-from typing import Any, ClassVar, Final, TypeAlias
+from typing import Final, TypeAlias
-from pydantic import Extra, Field, NonNegativeInt
+from pydantic import ConfigDict, Field, NonNegativeInt
from .basic_types import SemanticVersionStr
from .boot_options import BootOption, BootOptions
@@ -76,12 +76,8 @@
}
},
"boot-options": {
- "example_service_defined_boot_mode": BootOption.Config.schema_extra["examples"][
- 0
- ],
- "example_service_defined_theme_selection": BootOption.Config.schema_extra[
- "examples"
- ][1],
+ "example_service_defined_boot_mode": BootOption.model_config["json_schema_extra"]["examples"][0], # type: ignore [index]
+ "example_service_defined_theme_selection": BootOption.model_config["json_schema_extra"]["examples"][1], # type: ignore [index]
},
"min-visible-inputs": 2,
}
@@ -120,7 +116,7 @@ class ServiceMetaDataPublished(ServiceKeyVersion, ServiceBaseDisplay):
badges: list[Badge] | None = Field(None, deprecated=True)
- authors: list[Author] = Field(..., min_items=1)
+ authors: list[Author] = Field(..., min_length=1)
contact: LowerCaseEmailStr = Field(
...,
description="email to correspond to the authors about the node",
@@ -160,22 +156,21 @@ class ServiceMetaDataPublished(ServiceKeyVersion, ServiceBaseDisplay):
description="Image manifest digest. Note that this is NOT injected as an image label",
)
- class Config:
- description = "Description of a simcore node 'class' with input and output"
- extra = Extra.forbid
- frozen = False # overrides config from ServiceKeyVersion.
- allow_population_by_field_name = True
-
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ extra="forbid",
+ frozen=False,
+ populate_by_name=True,
+ json_schema_extra={
"examples": [
- _EXAMPLE,
- _EXAMPLE_W_BOOT_OPTIONS_AND_NO_DISPLAY_ORDER,
+ _EXAMPLE, # type: ignore[list-item]
+ _EXAMPLE_W_BOOT_OPTIONS_AND_NO_DISPLAY_ORDER, # type: ignore[list-item]
# latest
{
- **_EXAMPLE_W_BOOT_OPTIONS_AND_NO_DISPLAY_ORDER,
+ **_EXAMPLE_W_BOOT_OPTIONS_AND_NO_DISPLAY_ORDER, # type: ignore[dict-item]
"version_display": "Matterhorn Release",
"description_ui": True,
"release_date": "2024-05-31T13:45:30",
},
]
- }
+ },
+ )
diff --git a/packages/models-library/src/models_library/services_resources.py b/packages/models-library/src/models_library/services_resources.py
index 7fe4f268f8c5..ac7a68627548 100644
--- a/packages/models-library/src/models_library/services_resources.py
+++ b/packages/models-library/src/models_library/services_resources.py
@@ -1,15 +1,16 @@
import logging
from enum import auto
-from typing import Any, ClassVar, Final, TypeAlias
+from typing import Any, Final, TypeAlias
from pydantic import (
BaseModel,
ByteSize,
+ ConfigDict,
Field,
StrictFloat,
StrictInt,
- parse_obj_as,
- root_validator,
+ TypeAdapter,
+ model_validator,
)
from .docker import DockerGenericTag
@@ -23,13 +24,13 @@
# NOTE: replace hard coded `container` with function which can
# extract the name from the `service_key` or `registry_address/service_key`
-DEFAULT_SINGLE_SERVICE_NAME: Final[DockerGenericTag] = parse_obj_as(
- DockerGenericTag, "container"
-)
+DEFAULT_SINGLE_SERVICE_NAME: Final[DockerGenericTag] = TypeAdapter(
+ DockerGenericTag
+).validate_python("container")
-MEMORY_50MB: Final[int] = parse_obj_as(ByteSize, "50mib")
-MEMORY_250MB: Final[int] = parse_obj_as(ByteSize, "250mib")
-MEMORY_1GB: Final[int] = parse_obj_as(ByteSize, "1gib")
+MEMORY_50MB: Final[int] = TypeAdapter(ByteSize).validate_python("50mib")
+MEMORY_250MB: Final[int] = TypeAdapter(ByteSize).validate_python("250mib")
+MEMORY_1GB: Final[int] = TypeAdapter(ByteSize).validate_python("1gib")
GIGA: Final[float] = 1e9
CPU_10_PERCENT: Final[int] = int(0.1 * GIGA)
@@ -40,7 +41,7 @@ class ResourceValue(BaseModel):
limit: StrictInt | StrictFloat | str
reservation: StrictInt | StrictFloat | str
- @root_validator()
+ @model_validator(mode="before")
@classmethod
def _ensure_limits_are_equal_or_above_reservations(cls, values):
if isinstance(values["reservation"], str):
@@ -59,8 +60,7 @@ def set_reservation_same_as_limit(self) -> None:
def set_value(self, value: StrictInt | StrictFloat | str) -> None:
self.limit = self.reservation = value
- class Config:
- validate_assignment = True
+ model_config = ConfigDict(validate_assignment=True)
ResourcesDict = dict[ResourceName, ResourceValue]
@@ -92,8 +92,8 @@ def set_reservation_same_as_limit(self) -> None:
for resource in self.resources.values():
resource.set_reservation_same_as_limit()
- class Config:
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"example": {
"image": "simcore/service/dynamic/pretty-intense:1.0.0",
"resources": {
@@ -108,6 +108,7 @@ class Config:
},
}
}
+ )
ServiceResourcesDict: TypeAlias = dict[DockerGenericTag, ImageResources]
@@ -122,8 +123,7 @@ def create_from_single_service(
) -> ServiceResourcesDict:
if boot_modes is None:
boot_modes = [BootMode.CPU]
- return parse_obj_as(
- ServiceResourcesDict,
+ return TypeAdapter(ServiceResourcesDict).validate_python(
{
DEFAULT_SINGLE_SERVICE_NAME: {
"image": image,
@@ -140,8 +140,8 @@ def create_jsonable(
output: dict[DockerGenericTag, Any] = jsonable_encoder(service_resources)
return output
- class Config:
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"examples": [
# no compose spec (majority of services)
{
@@ -150,8 +150,10 @@ class Config:
"resources": {
"CPU": {"limit": 0.1, "reservation": 0.1},
"RAM": {
- "limit": parse_obj_as(ByteSize, "2Gib"),
- "reservation": parse_obj_as(ByteSize, "2Gib"),
+ "limit": TypeAdapter(ByteSize).validate_python("2Gib"),
+ "reservation": TypeAdapter(ByteSize).validate_python(
+ "2Gib"
+ ),
},
},
"boot_modes": [BootMode.CPU],
@@ -181,8 +183,10 @@ class Config:
"resources": {
"CPU": {"limit": 0.1, "reservation": 0.1},
"RAM": {
- "limit": parse_obj_as(ByteSize, "2Gib"),
- "reservation": parse_obj_as(ByteSize, "2Gib"),
+ "limit": TypeAdapter(ByteSize).validate_python("2Gib"),
+ "reservation": TypeAdapter(ByteSize).validate_python(
+ "2Gib"
+ ),
},
},
"boot_modes": [BootMode.CPU],
@@ -195,8 +199,10 @@ class Config:
"resources": {
"CPU": {"limit": 0.1, "reservation": 0.1},
"RAM": {
- "limit": parse_obj_as(ByteSize, "2Gib"),
- "reservation": parse_obj_as(ByteSize, "2Gib"),
+ "limit": TypeAdapter(ByteSize).validate_python("2Gib"),
+ "reservation": TypeAdapter(ByteSize).validate_python(
+ "2Gib"
+ ),
},
},
"boot_modes": [BootMode.CPU],
@@ -206,8 +212,10 @@ class Config:
"resources": {
"CPU": {"limit": 0.1, "reservation": 0.1},
"RAM": {
- "limit": parse_obj_as(ByteSize, "2Gib"),
- "reservation": parse_obj_as(ByteSize, "2Gib"),
+ "limit": TypeAdapter(ByteSize).validate_python("2Gib"),
+ "reservation": TypeAdapter(ByteSize).validate_python(
+ "2Gib"
+ ),
},
},
"boot_modes": [BootMode.CPU],
@@ -215,3 +223,4 @@ class Config:
},
]
}
+ )
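Illustrative note (not part of the patch): `parse_obj_as(T, value)` is replaced throughout by `TypeAdapter(T).validate_python(value)`, as in the constants above. A minimal, runnable sketch:

```python
from pydantic import ByteSize, TypeAdapter

# v1: parse_obj_as(ByteSize, "250mib")
MEMORY_250MB = TypeAdapter(ByteSize).validate_python("250mib")
assert int(MEMORY_250MB) == 250 * 1024 * 1024

# TypeAdapter instances can be built once and reused where the same type recurs
```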
diff --git a/packages/models-library/src/models_library/services_types.py b/packages/models-library/src/models_library/services_types.py
index 366d8bc00c20..87559eff9939 100644
--- a/packages/models-library/src/models_library/services_types.py
+++ b/packages/models-library/src/models_library/services_types.py
@@ -1,8 +1,8 @@
-import re
+from typing import Annotated, TypeAlias
from uuid import uuid4
import arrow
-from pydantic import ConstrainedStr
+from pydantic import StringConstraints
from .basic_regex import PROPERTY_KEY_RE, SIMPLE_VERSION_RE
from .services_regex import (
@@ -13,48 +13,21 @@
SERVICE_KEY_RE,
)
+ServicePortKey: TypeAlias = Annotated[str, StringConstraints(pattern=PROPERTY_KEY_RE)]
-class ServicePortKey(ConstrainedStr):
- regex = re.compile(PROPERTY_KEY_RE)
+FileName: TypeAlias = Annotated[str, StringConstraints(pattern=FILENAME_RE)]
- class Config:
- frozen = True
+ServiceKey: TypeAlias = Annotated[str, StringConstraints(pattern=SERVICE_KEY_RE)]
+ServiceKeyEncoded: TypeAlias = Annotated[str, StringConstraints(pattern=SERVICE_ENCODED_KEY_RE)]
-class FileName(ConstrainedStr):
- regex = re.compile(FILENAME_RE)
+DynamicServiceKey: TypeAlias = Annotated[str, StringConstraints(pattern=DYNAMIC_SERVICE_KEY_RE)]
- class Config:
- frozen = True
+ComputationalServiceKey: TypeAlias = Annotated[
+ str, StringConstraints(pattern=COMPUTATIONAL_SERVICE_KEY_RE)
+]
-
-class ServiceKey(ConstrainedStr):
- regex = SERVICE_KEY_RE
-
- class Config:
- frozen = True
-
-
-class ServiceKeyEncoded(ConstrainedStr):
- regex = re.compile(SERVICE_ENCODED_KEY_RE)
-
- class Config:
- frozen = True
-
-
-class DynamicServiceKey(ServiceKey):
- regex = DYNAMIC_SERVICE_KEY_RE
-
-
-class ComputationalServiceKey(ServiceKey):
- regex = COMPUTATIONAL_SERVICE_KEY_RE
-
-
-class ServiceVersion(ConstrainedStr):
- regex = re.compile(SIMPLE_VERSION_RE)
-
- class Config:
- frozen = True
+ServiceVersion: TypeAlias = Annotated[str, StringConstraints(pattern=SIMPLE_VERSION_RE)]
class RunID(str):
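Illustrative note (not part of the patch): `ConstrainedStr` subclasses become `Annotated[str, StringConstraints(...)]` aliases in v2, as in the type aliases above. A sketch with a hypothetical alias and pattern (not one of the library's regexes):

```python
from typing import Annotated, TypeAlias

from pydantic import StringConstraints, TypeAdapter

SemverStr: TypeAlias = Annotated[str, StringConstraints(pattern=r"^\d+\.\d+\.\d+$")]

assert TypeAdapter(SemverStr).validate_python("1.2.3") == "1.2.3"
```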
diff --git a/packages/models-library/src/models_library/services_ui.py b/packages/models-library/src/models_library/services_ui.py
index 221966933343..055fa58fd7be 100644
--- a/packages/models-library/src/models_library/services_ui.py
+++ b/packages/models-library/src/models_library/services_ui.py
@@ -1,6 +1,6 @@
from enum import Enum
-from pydantic import BaseModel, Extra, Field
+from pydantic import BaseModel, ConfigDict, Field
from pydantic.types import PositiveInt
@@ -14,23 +14,20 @@ class TextArea(BaseModel):
..., alias="minHeight", description="minimum Height of the textarea"
)
- class Config:
- extra = Extra.forbid
+ model_config = ConfigDict(extra="forbid")
class Structure(BaseModel):
key: str | bool | float
label: str
- class Config:
- extra = Extra.forbid
+ model_config = ConfigDict(extra="forbid")
class SelectBox(BaseModel):
- structure: list[Structure] = Field(..., min_items=1)
+ structure: list[Structure] = Field(..., min_length=1)
- class Config:
- extra = Extra.forbid
+ model_config = ConfigDict(extra="forbid")
class Widget(BaseModel):
@@ -39,5 +36,4 @@ class Widget(BaseModel):
)
details: TextArea | SelectBox
- class Config:
- extra = Extra.forbid
+ model_config = ConfigDict(extra="forbid")
diff --git a/packages/models-library/src/models_library/user_preferences.py b/packages/models-library/src/models_library/user_preferences.py
index 14d6b4e53f8d..2680c10223d2 100644
--- a/packages/models-library/src/models_library/user_preferences.py
+++ b/packages/models-library/src/models_library/user_preferences.py
@@ -1,15 +1,12 @@
from enum import auto
-from typing import Annotated, Any, ClassVar, TypeAlias
+from typing import Annotated, Any, ClassVar, Literal, TypeAlias, get_args
from pydantic import BaseModel, Field
-from pydantic.main import ModelMetaclass
+from pydantic._internal._model_construction import ModelMetaclass
from .services import ServiceKey, ServiceVersion
from .utils.enums import StrAutoEnum
-# NOTE: for pydantic-2 from pydantic._internal.import _model_construction
-# use _model_construction.ModelMetaclass instead!
-
class _AutoRegisterMeta(ModelMetaclass):
registered_user_preference_classes: ClassVar[dict[str, type]] = {}
@@ -77,14 +74,14 @@ def get_preference_name(cls) -> PreferenceName:
@classmethod
def get_default_value(cls) -> Any:
return (
- cls.__fields__["value"].default_factory()
- if cls.__fields__["value"].default_factory
- else cls.__fields__["value"].default
+ cls.model_fields["value"].default_factory()
+ if cls.model_fields["value"].default_factory
+ else cls.model_fields["value"].default
)
class FrontendUserPreference(_BaseUserPreferenceModel):
- preference_type: PreferenceType = Field(default=PreferenceType.FRONTEND, const=True)
+ preference_type: Literal[PreferenceType.FRONTEND] = PreferenceType.FRONTEND
preference_identifier: PreferenceIdentifier = Field(
..., description="used by the frontend"
@@ -93,11 +90,15 @@ class FrontendUserPreference(_BaseUserPreferenceModel):
value: Any
def to_db(self) -> dict:
- return self.dict(exclude={"preference_identifier", "preference_type"})
+ return self.model_dump(exclude={"preference_identifier", "preference_type"})
@classmethod
def update_preference_default_value(cls, new_default: Any) -> None:
- expected_type = cls.__fields__["value"].type_
+ expected_type = (
+ t[0]
+ if (t := get_args(cls.model_fields["value"].annotation))
+ else cls.model_fields["value"].annotation
+ )
detected_type = type(new_default)
if expected_type != detected_type:
msg = (
@@ -105,14 +106,14 @@ def update_preference_default_value(cls, new_default: Any) -> None:
)
raise TypeError(msg)
- if cls.__fields__["value"].default is None:
- cls.__fields__["value"].default_factory = lambda: new_default
+ if cls.model_fields["value"].default is None:
+ cls.model_fields["value"].default_factory = lambda: new_default
else:
- cls.__fields__["value"].default = new_default
+ cls.model_fields["value"].default = new_default
class UserServiceUserPreference(_BaseUserPreferenceModel):
- preference_type: PreferenceType = Field(PreferenceType.USER_SERVICE, const=True)
+ preference_type: Literal[PreferenceType.USER_SERVICE] = PreferenceType.USER_SERVICE
service_key: ServiceKey = Field(
..., description="the service which manages the preferences"
@@ -122,7 +123,7 @@ class UserServiceUserPreference(_BaseUserPreferenceModel):
)
def to_db(self) -> dict:
- return self.dict(exclude={"preference_type"})
+ return self.model_dump(exclude={"preference_type"})
AnyUserPreference: TypeAlias = Annotated[
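Illustrative note (not part of the patch): `Field(..., const=True)` was removed in v2; a `Literal[...]` annotation with a default pins the value instead, and `.dict()` becomes `.model_dump()`. A hypothetical sketch:

```python
from enum import Enum
from typing import Literal

from pydantic import BaseModel


class PreferenceType(str, Enum):  # stand-in enum
    FRONTEND = "frontend"


class FrontendPreference(BaseModel):  # hypothetical model
    # v1: preference_type: PreferenceType = Field(default=PreferenceType.FRONTEND, const=True)
    preference_type: Literal[PreferenceType.FRONTEND] = PreferenceType.FRONTEND
    value: int | None = None


assert FrontendPreference().model_dump()["preference_type"] == PreferenceType.FRONTEND
```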
diff --git a/packages/models-library/src/models_library/users.py b/packages/models-library/src/models_library/users.py
index 31ca948a1b87..8cb7793d2f83 100644
--- a/packages/models-library/src/models_library/users.py
+++ b/packages/models-library/src/models_library/users.py
@@ -1,18 +1,14 @@
-from typing import TypeAlias
+from typing import Annotated, TypeAlias
-from pydantic import BaseModel, ConstrainedStr, Field, PositiveInt
+from pydantic import BaseModel, ConfigDict, Field, PositiveInt, StringConstraints
UserID: TypeAlias = PositiveInt
GroupID: TypeAlias = PositiveInt
-class FirstNameStr(ConstrainedStr):
- strip_whitespace = True
- max_length = 255
+FirstNameStr: TypeAlias = Annotated[str, StringConstraints(strip_whitespace=True, max_length=255)]
-
-class LastNameStr(FirstNameStr):
- ...
+LastNameStr: TypeAlias = Annotated[str, StringConstraints(strip_whitespace=True, max_length=255)]
class UserBillingDetails(BaseModel):
@@ -26,5 +22,4 @@ class UserBillingDetails(BaseModel):
postal_code: str | None
phone: str | None
- class Config:
- orm_mode = True
+ model_config = ConfigDict(from_attributes=True)
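Illustrative note (not part of the patch): `orm_mode = True` becomes `from_attributes=True`, and attribute-based objects are loaded with `model_validate`. A trimmed sketch keeping only two of the fields and using a dataclass in place of an ORM row:

```python
from dataclasses import dataclass

from pydantic import BaseModel, ConfigDict


@dataclass
class BillingRow:  # hypothetical stand-in for a db/ORM row
    postal_code: str | None
    phone: str | None


class BillingDetails(BaseModel):  # trimmed stand-in for UserBillingDetails
    postal_code: str | None
    phone: str | None

    model_config = ConfigDict(from_attributes=True)


details = BillingDetails.model_validate(BillingRow(postal_code=None, phone="+41 000"))
assert details.phone == "+41 000"
```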
diff --git a/packages/models-library/src/models_library/utils/_original_fastapi_encoders.py b/packages/models-library/src/models_library/utils/_original_fastapi_encoders.py
index 6f49b02a5ba8..b48628b9ab95 100644
--- a/packages/models-library/src/models_library/utils/_original_fastapi_encoders.py
+++ b/packages/models-library/src/models_library/utils/_original_fastapi_encoders.py
@@ -3,22 +3,25 @@
# wget https://raw.githubusercontent.com/tiangolo/fastapi/master/fastapi/encoders.py --output-document=_original_fastapi_encoders
#
import dataclasses
-from collections import defaultdict
-from collections.abc import Callable
+from collections import defaultdict, deque
from enum import Enum
from pathlib import PurePath
from types import GeneratorType
-from typing import Any
+from typing import Any, Callable, Union
+from models_library.utils.json_serialization import ENCODERS_BY_TYPE
from pydantic import BaseModel
-from pydantic.json import ENCODERS_BY_TYPE
+from pydantic_core import PydanticUndefined, PydanticUndefinedType
+from typing_extensions import Annotated, Doc
-SetIntStr = set[int | str]
-DictIntStrAny = dict[int | str, Any]
+Undefined = PydanticUndefined
+UndefinedType = PydanticUndefinedType
+
+IncEx = Union[set[int], set[str], dict[int, Any], dict[str, Any]]
def generate_encoders_by_class_tuples(
- type_encoder_map: dict[Any, Callable[[Any], Any]]
+ type_encoder_map: dict[Any, Callable[[Any], Any]],
) -> dict[Callable[[Any], Any], tuple[Any, ...]]:
encoders_by_class_tuples: dict[Callable[[Any], Any], tuple[Any, ...]] = defaultdict(
tuple
@@ -32,32 +35,123 @@ def generate_encoders_by_class_tuples(
def jsonable_encoder(
- obj: Any,
- include: SetIntStr | DictIntStrAny | None = None,
- exclude: SetIntStr | DictIntStrAny | None = None,
- by_alias: bool = True,
- exclude_unset: bool = False,
- exclude_defaults: bool = False,
- exclude_none: bool = False,
- custom_encoder: dict[Any, Callable[[Any], Any]] | None = None,
- sqlalchemy_safe: bool = True,
+ obj: Annotated[
+ Any,
+ Doc(
+ """
+ The input object to convert to JSON.
+ """
+ ),
+ ],
+ include: Annotated[
+ IncEx | None,
+ Doc(
+ """
+ Pydantic's `include` parameter, passed to Pydantic models to set the
+ fields to include.
+ """
+ ),
+ ] = None,
+ exclude: Annotated[
+ IncEx | None,
+ Doc(
+ """
+ Pydantic's `exclude` parameter, passed to Pydantic models to set the
+ fields to exclude.
+ """
+ ),
+ ] = None,
+ by_alias: Annotated[
+ bool,
+ Doc(
+ """
+ Pydantic's `by_alias` parameter, passed to Pydantic models to define if
+ the output should use the alias names (when provided) or the Python
+ attribute names. In an API, if you set an alias, it's probably because you
+ want to use it in the result, so you probably want to leave this set to
+ `True`.
+ """
+ ),
+ ] = True,
+ exclude_unset: Annotated[
+ bool,
+ Doc(
+ """
+ Pydantic's `exclude_unset` parameter, passed to Pydantic models to define
+ if it should exclude from the output the fields that were not explicitly
+ set (and that only had their default values).
+ """
+ ),
+ ] = False,
+ exclude_defaults: Annotated[
+ bool,
+ Doc(
+ """
+ Pydantic's `exclude_defaults` parameter, passed to Pydantic models to define
+ if it should exclude from the output the fields that had the same default
+ value, even when they were explicitly set.
+ """
+ ),
+ ] = False,
+ exclude_none: Annotated[
+ bool,
+ Doc(
+ """
+ Pydantic's `exclude_none` parameter, passed to Pydantic models to define
+ if it should exclude from the output any fields that have a `None` value.
+ """
+ ),
+ ] = False,
+ custom_encoder: Annotated[
+ dict[Any, Callable[[Any], Any]] | None,
+ Doc(
+ """
+ Pydantic's `custom_encoder` parameter, passed to Pydantic models to define
+ a custom encoder.
+ """
+ ),
+ ] = None,
+ sqlalchemy_safe: Annotated[
+ bool,
+ Doc(
+ """
+ Exclude from the output any fields that start with the name `_sa`.
+
+ This is mainly a hack for compatibility with SQLAlchemy objects, they
+ store internal SQLAlchemy-specific state in attributes named with `_sa`,
+ and those objects can't (and shouldn't be) serialized to JSON.
+ """
+ ),
+ ] = True,
) -> Any:
+ """
+ Convert any object to something that can be encoded in JSON.
+
+ This is used internally by FastAPI to make sure anything you return can be
+ encoded as JSON before it is sent to the client.
+
+ You can also use it yourself, for example to convert objects before saving them
+ in a database that supports only JSON.
+
+ Read more about it in the
+ [FastAPI docs for JSON Compatible Encoder](https://fastapi.tiangolo.com/tutorial/encoder/).
+ """
custom_encoder = custom_encoder or {}
if custom_encoder:
if type(obj) in custom_encoder:
return custom_encoder[type(obj)](obj)
- for encoder_type, encoder_instance in custom_encoder.items():
- if isinstance(obj, encoder_type):
- return encoder_instance(obj)
- if include is not None and not isinstance(include, set | dict):
+ else:
+ for encoder_type, encoder_instance in custom_encoder.items():
+ if isinstance(obj, encoder_type):
+ return encoder_instance(obj)
+ if include is not None and not isinstance(include, (set, dict)):
include = set(include)
- if exclude is not None and not isinstance(exclude, set | dict):
+ if exclude is not None and not isinstance(exclude, (set, dict)):
exclude = set(exclude)
if isinstance(obj, BaseModel):
- encoder = getattr(obj.__config__, "json_encoders", {})
- if custom_encoder:
- encoder.update(custom_encoder)
- obj_dict = obj.dict(
+ obj_dict = BaseModel.model_dump(
+ obj,
+ mode="json",
include=include,
exclude=exclude,
by_alias=by_alias,
@@ -71,7 +165,6 @@ def jsonable_encoder(
obj_dict,
exclude_none=exclude_none,
exclude_defaults=exclude_defaults,
- custom_encoder=encoder,
sqlalchemy_safe=sqlalchemy_safe,
)
if dataclasses.is_dataclass(obj):
@@ -91,8 +184,10 @@ def jsonable_encoder(
return obj.value
if isinstance(obj, PurePath):
return str(obj)
- if isinstance(obj, str | int | float | type(None)):
+ if isinstance(obj, (str, int, float, type(None))):
return obj
+ if isinstance(obj, UndefinedType):
+ return None
if isinstance(obj, dict):
encoded_dict = {}
allowed_keys = set(obj.keys())
@@ -128,7 +223,7 @@ def jsonable_encoder(
)
encoded_dict[encoded_key] = encoded_value
return encoded_dict
- if isinstance(obj, list | set | frozenset | GeneratorType | tuple):
+ if isinstance(obj, (list, set, frozenset, GeneratorType, tuple, deque)):
encoded_list = []
for item in obj:
encoded_list.append(
@@ -161,7 +256,7 @@ def jsonable_encoder(
data = vars(obj)
except Exception as e:
errors.append(e)
- raise ValueError(errors)
+ raise ValueError(errors) from e
return jsonable_encoder(
data,
include=include,
diff --git a/packages/models-library/src/models_library/utils/json_serialization.py b/packages/models-library/src/models_library/utils/json_serialization.py
index cc87c6860413..69ffb00572dd 100644
--- a/packages/models-library/src/models_library/utils/json_serialization.py
+++ b/packages/models-library/src/models_library/utils/json_serialization.py
@@ -3,12 +3,28 @@
- implemented using orjson, which performs better. SEE https://github.com/ijl/orjson?tab=readme-ov-file#performance
"""
+import datetime
+from collections import deque
from collections.abc import Callable
+from decimal import Decimal
+from enum import Enum
+from ipaddress import (
+ IPv4Address,
+ IPv4Interface,
+ IPv4Network,
+ IPv6Address,
+ IPv6Interface,
+ IPv6Network,
+)
+from pathlib import Path
+from re import Pattern
+from types import GeneratorType
from typing import Any, Final, NamedTuple
+from uuid import UUID
import orjson
-from pydantic.json import ENCODERS_BY_TYPE, pydantic_encoder
-from pydantic.types import ConstrainedFloat
+from pydantic import NameEmail, SecretBytes, SecretStr
+from pydantic_extra_types.color import Color
class SeparatorTuple(NamedTuple):
@@ -16,12 +32,86 @@ class SeparatorTuple(NamedTuple):
key_separator: str
-# Extends encoders for pydantic_encoder
-ENCODERS_BY_TYPE[ConstrainedFloat] = float
-
_orjson_default_separator: Final = SeparatorTuple(item_separator=",", key_separator=":")
+def isoformat(o: datetime.date | datetime.time) -> str:
+ return o.isoformat()
+
+
+def decimal_encoder(dec_value: Decimal) -> int | float:
+ """
+    Encodes a Decimal as int if there's no exponent, otherwise float
+
+    This is useful when we use ConstrainedDecimal to represent Numeric(x,0)
+    where an integer (but not int typed) is used. Encoding this as a float
+ results in failed round-tripping between encode and parse.
+ Our Id type is a prime example of this.
+
+ >>> decimal_encoder(Decimal("1.0"))
+ 1.0
+
+ >>> decimal_encoder(Decimal("1"))
+ 1
+ """
+ if dec_value.as_tuple().exponent >= 0: # type: ignore[operator]
+ return int(dec_value)
+ else:
+ return float(dec_value)
+
+
+ENCODERS_BY_TYPE: dict[type[Any], Callable[[Any], Any]] = {
+ bytes: lambda o: o.decode(),
+ Color: str,
+ datetime.date: isoformat,
+ datetime.datetime: isoformat,
+ datetime.time: isoformat,
+ datetime.timedelta: lambda td: td.total_seconds(),
+ Decimal: decimal_encoder,
+ Enum: lambda o: o.value,
+ frozenset: list,
+ deque: list,
+ GeneratorType: list,
+ IPv4Address: str,
+ IPv4Interface: str,
+ IPv4Network: str,
+ IPv6Address: str,
+ IPv6Interface: str,
+ IPv6Network: str,
+ NameEmail: str,
+ Path: str,
+ Pattern: lambda o: o.pattern,
+ SecretBytes: str,
+ SecretStr: str,
+ set: list,
+ UUID: str,
+}
+
+
+def pydantic_encoder(obj: Any) -> Any:
+ from dataclasses import asdict, is_dataclass
+
+ from pydantic.main import BaseModel
+
+ if isinstance(obj, BaseModel):
+ return obj.model_dump()
+ elif is_dataclass(obj):
+ return asdict(obj) # type: ignore[call-overload]
+
+ # Check the class type and its superclasses for a matching encoder
+ for base in obj.__class__.__mro__[:-1]:
+ try:
+ encoder = ENCODERS_BY_TYPE[base]
+ except KeyError:
+ continue
+ return encoder(obj)
+
+ # We have exited the for loop without finding a suitable encoder
+ raise TypeError(
+ f"Object of type '{obj.__class__.__name__}' is not JSON serializable"
+ )
+
+
def json_dumps(
obj: Any,
*,
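Illustrative note (not part of the patch): the `ENCODERS_BY_TYPE` table and `pydantic_encoder` added above can back a plain JSON dump as a `default` callback. A usage sketch, assuming the module is importable as shown:

```python
import json
from datetime import datetime
from uuid import uuid4

from models_library.utils.json_serialization import pydantic_encoder  # defined in the hunk above

payload = {"id": uuid4(), "created": datetime(2024, 5, 31, 13, 45, 30)}

# pydantic_encoder walks ENCODERS_BY_TYPE over the object's MRO,
# raising TypeError for anything it cannot serialize
print(json.dumps(payload, default=pydantic_encoder))
```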
diff --git a/packages/models-library/src/models_library/utils/pydantic_tools_extension.py b/packages/models-library/src/models_library/utils/pydantic_tools_extension.py
index 08e70fb92aa1..1078120a0a6c 100644
--- a/packages/models-library/src/models_library/utils/pydantic_tools_extension.py
+++ b/packages/models-library/src/models_library/utils/pydantic_tools_extension.py
@@ -1,15 +1,14 @@
import functools
from typing import Final, TypeVar
-from pydantic import Field, ValidationError
-from pydantic.tools import parse_obj_as
+from pydantic import Field, TypeAdapter, ValidationError
T = TypeVar("T")
def parse_obj_or_none(type_: type[T], obj) -> T | None:
try:
- return parse_obj_as(type_, obj)
+ return TypeAdapter(type_).validate_python(obj)
except ValidationError:
return None
diff --git a/packages/models-library/src/models_library/utils/specs_substitution.py b/packages/models-library/src/models_library/utils/specs_substitution.py
index f12968136f68..73260237b22b 100644
--- a/packages/models-library/src/models_library/utils/specs_substitution.py
+++ b/packages/models-library/src/models_library/utils/specs_substitution.py
@@ -1,7 +1,7 @@
from typing import Any, NamedTuple, TypeAlias, cast
+from models_library.errors_classes import OsparcErrorMixin
from pydantic import StrictBool, StrictFloat, StrictInt
-from pydantic.errors import PydanticErrorMixin
from .json_serialization import json_dumps, json_loads
from .string_substitution import (
@@ -15,7 +15,7 @@
SubstitutionValue: TypeAlias = StrictBool | StrictInt | StrictFloat | str
-class IdentifierSubstitutionError(PydanticErrorMixin, KeyError):
+class IdentifierSubstitutionError(OsparcErrorMixin, KeyError):
msg_template: str = (
"Was not able to substitute identifier "
"'{name}'. It was not found in: {substitutions}"
diff --git a/packages/models-library/src/models_library/wallets.py b/packages/models-library/src/models_library/wallets.py
index 08651353daae..29d122269728 100644
--- a/packages/models-library/src/models_library/wallets.py
+++ b/packages/models-library/src/models_library/wallets.py
@@ -1,9 +1,9 @@
from datetime import datetime
from decimal import Decimal
from enum import auto
-from typing import Any, ClassVar, TypeAlias
+from typing import TypeAlias
-from pydantic import BaseModel, Field, PositiveInt
+from pydantic import BaseModel, ConfigDict, Field, PositiveInt
from .utils.enums import StrAutoEnum
@@ -20,16 +20,17 @@ class WalletInfo(BaseModel):
wallet_name: str
wallet_credit_amount: Decimal
- class Config:
- schema_extra: ClassVar[dict[str, Any]] = {
+ model_config = ConfigDict(
+ json_schema_extra={
"examples": [
{
"wallet_id": 1,
"wallet_name": "My Wallet",
- "wallet_credit_amount": Decimal(10),
+ "wallet_credit_amount": Decimal(10), # type: ignore[dict-item]
}
]
}
+ )
ZERO_CREDITS = Decimal(0)
diff --git a/packages/models-library/src/models_library/workspaces.py b/packages/models-library/src/models_library/workspaces.py
index c08e02501cb3..e1d0f8d17fde 100644
--- a/packages/models-library/src/models_library/workspaces.py
+++ b/packages/models-library/src/models_library/workspaces.py
@@ -3,7 +3,7 @@
from models_library.access_rights import AccessRights
from models_library.users import GroupID
-from pydantic import BaseModel, Field, PositiveInt
+from pydantic import BaseModel, ConfigDict, Field, PositiveInt
WorkspaceID: TypeAlias = PositiveInt
@@ -31,13 +31,11 @@ class WorkspaceDB(BaseModel):
description="Timestamp of last modification",
)
- class Config:
- orm_mode = True
+ model_config = ConfigDict(from_attributes=True)
class UserWorkspaceAccessRightsDB(WorkspaceDB):
my_access_rights: AccessRights
access_rights: dict[GroupID, AccessRights]
- class Config:
- orm_mode = True
+ model_config = ConfigDict(from_attributes=True)
diff --git a/packages/models-library/tests/test__pydantic_models.py b/packages/models-library/tests/test__pydantic_models.py
index 716cf9f79068..548d34f6569e 100644
--- a/packages/models-library/tests/test__pydantic_models.py
+++ b/packages/models-library/tests/test__pydantic_models.py
@@ -11,8 +11,9 @@
import pytest
from models_library.projects_nodes import InputTypes, OutputTypes
from models_library.projects_nodes_io import SimCoreFileLink
-from pydantic import BaseModel, ValidationError, schema_json_of
+from pydantic import BaseModel, Field, ValidationError, schema_json_of
from pydantic.types import Json
+from pydantic.version import version_short
# NOTE: pydantic at a glance (just a few key features):
#
@@ -66,26 +67,31 @@ class ArgumentAnnotation(BaseModel):
ArgumentAnnotation(**x_annotation.dict())
assert exc_info.value.errors()[0] == {
+ "input": {"items": {"type": "integer"}, "title": "schema[x]", "type": "array"},
"loc": ("data_schema",),
- "msg": "JSON object must be str, bytes or bytearray",
- "type": "type_error.json",
+ "msg": "JSON input should be string, bytes or bytearray",
+ "type": "json_type",
+ "url": f"https://errors.pydantic.dev/{version_short()}/v/json_type",
}
with pytest.raises(ValidationError) as exc_info:
ArgumentAnnotation(name="foo", data_schema="invalid-json")
assert exc_info.value.errors()[0] == {
+ "ctx": {"error": "expected value at line 1 column 1"},
+ "input": "invalid-json",
"loc": ("data_schema",),
- "msg": "Invalid JSON",
- "type": "value_error.json",
+ "msg": "Invalid JSON: expected value at line 1 column 1",
+ "type": "json_invalid",
+ "url": f"https://errors.pydantic.dev/{version_short()}/v/json_invalid",
}
def test_union_types_coercion():
# SEE https://pydantic-docs.helpmanual.io/usage/types/#unions
class Func(BaseModel):
- input: InputTypes
- output: OutputTypes
+ input: InputTypes = Field(union_mode="left_to_right")
+ output: OutputTypes = Field(union_mode="left_to_right")
assert get_origin(InputTypes) is Union
assert get_origin(OutputTypes) is Union
@@ -94,45 +100,49 @@ class Func(BaseModel):
# NOTE: it is recommended that, when defining Union annotations, the most specific type is included first and followed by less specific types.
#
- assert Func.schema()["properties"]["input"] == {
+ assert Func.model_json_schema()["properties"]["input"] == {
"title": "Input",
"anyOf": [
{"type": "boolean"},
{"type": "integer"},
{"type": "number"},
- {"format": "json-string", "type": "string"},
+ {
+ "contentMediaType": "application/json",
+ "contentSchema": {},
+ "type": "string",
+ },
{"type": "string"},
- {"$ref": "#/definitions/PortLink"},
- {"$ref": "#/definitions/SimCoreFileLink"},
- {"$ref": "#/definitions/DatCoreFileLink"},
- {"$ref": "#/definitions/DownloadLink"},
+ {"$ref": "#/$defs/PortLink"},
+ {"$ref": "#/$defs/SimCoreFileLink"},
+ {"$ref": "#/$defs/DatCoreFileLink"},
+ {"$ref": "#/$defs/DownloadLink"},
{"type": "array", "items": {}},
{"type": "object"},
],
}
# integers ------------------------
- model = Func.parse_obj({"input": "0", "output": 1})
- print(model.json(indent=1))
+ model = Func.model_validate({"input": "0", "output": 1})
+ print(model.model_dump_json(indent=1))
assert model.input == 0
assert model.output == 1
# numbers and bool ------------------------
- model = Func.parse_obj({"input": "0.5", "output": "false"})
- print(model.json(indent=1))
+ model = Func.model_validate({"input": "0.5", "output": "false"})
+ print(model.model_dump_json(indent=1))
assert model.input == 0.5
assert model.output is False
# (undefined) json string vs string ------------------------
- model = Func.parse_obj(
+ model = Func.model_validate(
{
"input": '{"w": 42, "z": false}', # NOTE: this is a raw json string
"output": "some/path/or/string",
}
)
- print(model.json(indent=1))
+ print(model.model_dump_json(indent=1))
assert model.input == {"w": 42, "z": False}
assert model.output == "some/path/or/string"
@@ -140,24 +150,26 @@ class Func(BaseModel):
# (undefined) json string vs SimCoreFileLink.dict() ------------
MINIMAL = 2 # <--- index of the example with the minimum required fields
assert SimCoreFileLink in get_args(OutputTypes)
- example = SimCoreFileLink.parse_obj(
- SimCoreFileLink.Config.schema_extra["examples"][MINIMAL]
+ example = SimCoreFileLink.model_validate(
+ SimCoreFileLink.model_config["json_schema_extra"]["examples"][MINIMAL]
)
- model = Func.parse_obj(
+ model = Func.model_validate(
{
"input": '{"w": 42, "z": false}',
- "output": example.dict(
+ "output": example.model_dump(
exclude_unset=True
), # NOTE: this is NOT a raw json string
}
)
- print(model.json(indent=1))
+ print(model.model_dump_json(indent=1))
assert model.input == {"w": 42, "z": False}
assert model.output == example
assert isinstance(model.output, SimCoreFileLink)
# json array and objects
- model = Func.parse_obj({"input": {"w": 42, "z": False}, "output": [1, 2, 3, None]})
- print(model.json(indent=1))
+ model = Func.model_validate(
+ {"input": {"w": 42, "z": False}, "output": [1, 2, 3, None]}
+ )
+ print(model.model_dump_json(indent=1))
assert model.input == {"w": 42, "z": False}
assert model.output == [1, 2, 3, None]
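Illustrative note (not part of the patch): pydantic v2 defaults to "smart" union resolution, so the test pins the v1 left-to-right behaviour per field with `Field(union_mode="left_to_right")`. A hypothetical sketch with a simplified union:

```python
from pydantic import BaseModel, Field


class Func(BaseModel):  # hypothetical model with a simplified union
    value: bool | int | float | str = Field(union_mode="left_to_right")


# "0.5" fails bool and int coercion, so the float member wins, as in v1
assert Func(value="0.5").value == 0.5
```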
diff --git a/packages/models-library/tests/test__pydantic_models_and_enums.py b/packages/models-library/tests/test__pydantic_models_and_enums.py
index 51b4151fecbd..51f0226ee80d 100644
--- a/packages/models-library/tests/test__pydantic_models_and_enums.py
+++ b/packages/models-library/tests/test__pydantic_models_and_enums.py
@@ -110,8 +110,8 @@ def test_parsing_strenum_in_pydantic():
def test_parsing_str_and_enum_in_pydantic():
# Can still NOT parse equilalent enum(-only)
- with pytest.raises(ValidationError):
- parse_obj_as(ModelStrAndEnum, {"color": Color1.RED})
+ # with pytest.raises(ValidationError):
+ # parse_obj_as(ModelStrAndEnum, {"color": Color1.RED})
# And the opposite? NO!!!
with pytest.raises(ValidationError):
diff --git a/packages/models-library/tests/test_basic_types.py b/packages/models-library/tests/test_basic_types.py
index e2077d173d15..dbd847246cf9 100644
--- a/packages/models-library/tests/test_basic_types.py
+++ b/packages/models-library/tests/test_basic_types.py
@@ -6,15 +6,15 @@
IDStr,
MD5Str,
SHA1Str,
+ ShortTruncatedStr,
UUIDStr,
VersionTag,
)
-from pydantic import ConstrainedStr, ValidationError
-from pydantic.tools import parse_obj_as
+from pydantic import TypeAdapter, ValidationError
class _Example(NamedTuple):
- constr: type[ConstrainedStr]
+ constr: type[str]
good: str
bad: str
@@ -49,27 +49,43 @@ class _Example(NamedTuple):
"constraint_str_type,sample",
[(p.constr, p.good) for p in _EXAMPLES],
)
-def test_constrained_str_succeeds(
- constraint_str_type: type[ConstrainedStr], sample: str
-):
- assert parse_obj_as(constraint_str_type, sample) == sample
+def test_constrained_str_succeeds(constraint_str_type: type[str], sample: str):
+ assert TypeAdapter(constraint_str_type).validate_python(sample) == sample
@pytest.mark.parametrize(
"constraint_str_type,sample",
[(p.constr, p.bad) for p in _EXAMPLES],
)
-def test_constrained_str_fails(constraint_str_type: type[ConstrainedStr], sample: str):
+def test_constrained_str_fails(constraint_str_type: type[str], sample: str):
with pytest.raises(ValidationError):
- parse_obj_as(constraint_str_type, sample)
+ TypeAdapter(constraint_str_type).validate_python(sample)
def test_string_identifier_constraint_type():
# strip spaces
- assert parse_obj_as(IDStr, " 123 trim spaces ") == "123 trim spaces"
+ assert (
+ TypeAdapter(IDStr).validate_python(" 123 trim spaces ") == "123 trim spaces"
+ )
# limited to 100!
- parse_obj_as(IDStr, "X" * 100)
+ TypeAdapter(IDStr).validate_python("X" * IDStr.max_length)
with pytest.raises(ValidationError):
- parse_obj_as(IDStr, "X" * 101)
+ TypeAdapter(IDStr).validate_python("X" * (IDStr.max_length + 1))
+
+
+def test_short_truncated_string():
+ assert (
+ TypeAdapter(ShortTruncatedStr).validate_python(
+ "X" * ShortTruncatedStr.curtail_length
+ )
+ == "X" * ShortTruncatedStr.curtail_length
+ )
+
+ assert (
+ TypeAdapter(ShortTruncatedStr).validate_python(
+ "X" * (ShortTruncatedStr.curtail_length + 1)
+ )
+ == "X" * ShortTruncatedStr.curtail_length
+ )
diff --git a/packages/models-library/tests/test_docker.py b/packages/models-library/tests/test_docker.py
index 2fddd55419ae..55cb9419bbcb 100644
--- a/packages/models-library/tests/test_docker.py
+++ b/packages/models-library/tests/test_docker.py
@@ -103,7 +103,7 @@ def test_docker_generic_tag(image_name: str, valid: bool):
@pytest.mark.parametrize(
"obj_data",
- StandardSimcoreDockerLabels.Config.schema_extra["examples"],
+ StandardSimcoreDockerLabels.model_config["json_schema_extra"]["examples"],
ids=str,
)
def test_simcore_service_docker_label_keys(obj_data: dict[str, Any]):
diff --git a/packages/models-library/tests/test_emails.py b/packages/models-library/tests/test_emails.py
index 42ae8c84f1fe..f2b431c55d3b 100644
--- a/packages/models-library/tests/test_emails.py
+++ b/packages/models-library/tests/test_emails.py
@@ -1,14 +1,21 @@
import pytest
from models_library.emails import LowerCaseEmailStr
-from pydantic import BaseModel
+from pydantic import BaseModel, ValidationError
+
+
+class Profile(BaseModel):
+ email: LowerCaseEmailStr
@pytest.mark.parametrize(
"email_input", ["bla@gmail.com", "BlA@gMaIL.com", "BLA@GMAIL.COM"]
)
def test_lowercase_email(email_input: str):
- class Profile(BaseModel):
- email: LowerCaseEmailStr
-
data = Profile(email=email_input)
assert data.email == "bla@gmail.com"
+
+
+@pytest.mark.parametrize("email_input", ["blagmail.com", "BlA@.com", "bLA@", ""])
+def test_malformed_email(email_input: str):
+ with pytest.raises(ValidationError):
+ Profile(email=email_input)
diff --git a/packages/models-library/tests/test_errors.py b/packages/models-library/tests/test_errors.py
index 6b10f6bcbddb..82cf979e463f 100644
--- a/packages/models-library/tests/test_errors.py
+++ b/packages/models-library/tests/test_errors.py
@@ -5,7 +5,9 @@
import pytest
from models_library.errors import ErrorDict
-from pydantic import BaseModel, ValidationError, conint
+from pydantic import BaseModel, Field, ValidationError
+from pydantic.version import version_short
+from typing_extensions import Annotated
def test_pydantic_error_dict():
@@ -13,7 +15,7 @@ class B(BaseModel):
y: list[int]
class A(BaseModel):
- x: conint(ge=2)
+ x: Annotated[int, Field(ge=2)]
b: B
with pytest.raises(ValidationError) as exc_info:
@@ -34,13 +36,15 @@ def _copy(d, exclude):
return {k: v for k, v in d.items() if k not in exclude}
assert _copy(errors[0], exclude={"msg"}) == {
+ "ctx": {"ge": 2},
+ "input": -1,
"loc": ("x",),
- # "msg": "ensure this value is...equal to 2",
- "type": "value_error.number.not_ge",
- "ctx": {"limit_value": 2},
+ "type": "greater_than_equal",
+ "url": f"https://errors.pydantic.dev/{version_short()}/v/greater_than_equal",
}
assert _copy(errors[1], exclude={"msg"}) == {
+ "input": "wrong",
"loc": ("b", "y", 1),
- # "msg": "value is not a valid integer",
- "type": "type_error.integer",
+ "type": "int_parsing",
+ "url": f"https://errors.pydantic.dev/{version_short()}/v/int_parsing",
}
diff --git a/packages/models-library/tests/test_errors_classes.py b/packages/models-library/tests/test_errors_classes.py
index 4372c4eb8e5c..754367805e23 100644
--- a/packages/models-library/tests/test_errors_classes.py
+++ b/packages/models-library/tests/test_errors_classes.py
@@ -10,7 +10,6 @@
import pytest
from models_library.errors_classes import OsparcErrorMixin
-from pydantic.errors import PydanticErrorMixin
def test_get_full_class_name():
@@ -138,13 +137,7 @@ class MyError(OsparcErrorMixin, ValueError):
def test_missing_keys_in_msg_template_does_not_raise():
- class MyErrorBefore(PydanticErrorMixin, ValueError):
- msg_template = "{value} and {missing}"
-
- with pytest.raises(KeyError, match="missing"):
- str(MyErrorBefore(value=42))
-
- class MyErrorAfter(OsparcErrorMixin, ValueError):
+ class MyError(OsparcErrorMixin, ValueError):
msg_template = "{value} and {missing}"
- assert str(MyErrorAfter(value=42)) == "42 and 'missing=?'"
+ assert str(MyError(value=42)) == "42 and 'missing=?'"
diff --git a/packages/models-library/tests/test_function_services_catalog.py b/packages/models-library/tests/test_function_services_catalog.py
index 0844ed29a4eb..b5f0c21b0bc6 100644
--- a/packages/models-library/tests/test_function_services_catalog.py
+++ b/packages/models-library/tests/test_function_services_catalog.py
@@ -31,7 +31,7 @@ def test_catalog_frontend_services_registry():
registry = {(s.key, s.version): s for s in iter_service_docker_data()}
for s in registry.values():
- print(s.json(exclude_unset=True, indent=1))
+ print(s.model_dump_json(exclude_unset=True, indent=1))
# one version per front-end service?
versions_per_service = defaultdict(list)
diff --git a/packages/models-library/tests/test_generics.py b/packages/models-library/tests/test_generics.py
index a1201701fd8d..b778cd4a490f 100644
--- a/packages/models-library/tests/test_generics.py
+++ b/packages/models-library/tests/test_generics.py
@@ -11,6 +11,7 @@
from faker import Faker
from models_library.generics import DictModel, Envelope
from pydantic import BaseModel, ValidationError
+from pydantic.version import version_short
def test_dict_base_model():
@@ -86,7 +87,7 @@ def test_enveloped_data_builtin(builtin_type: type, builtin_value: Any):
def test_enveloped_data_model():
class User(BaseModel):
idr: int
- name = "Jane Doe"
+ name: str = "Jane Doe"
enveloped = Envelope[User](data={"idr": 3})
@@ -102,9 +103,11 @@ def test_enveloped_data_dict():
error: ValidationError = err_info.value
assert error.errors() == [
{
+ "input": "not-a-dict",
"loc": ("data",),
- "msg": "value is not a valid dict",
- "type": "type_error.dict",
+ "msg": "Input should be a valid dictionary",
+ "type": "dict_type",
+ "url": f"https://errors.pydantic.dev/{version_short()}/v/dict_type",
}
]
@@ -122,9 +125,11 @@ def test_enveloped_data_list():
error: ValidationError = err_info.value
assert error.errors() == [
{
+ "input": "not-a-list",
"loc": ("data",),
- "msg": "value is not a valid list",
- "type": "type_error.list",
+ "msg": "Input should be a valid list",
+ "type": "list_type",
+ "url": f"https://errors.pydantic.dev/{version_short()}/v/list_type",
}
]
diff --git a/packages/models-library/tests/test_project_nodes_io.py b/packages/models-library/tests/test_project_nodes_io.py
index 992c4d1f6048..aac9568eccb9 100644
--- a/packages/models-library/tests/test_project_nodes_io.py
+++ b/packages/models-library/tests/test_project_nodes_io.py
@@ -96,9 +96,15 @@ def test_store_discriminator():
},
}
- datacore_node = Node.parse_obj(workbench["89f95b67-a2a3-4215-a794-2356684deb61"])
- rawgraph_node = Node.parse_obj(workbench["88119776-e869-4df2-a529-4aae9d9fa35c"])
- simcore_node = Node.parse_obj(workbench["75c1707c-ec1c-49ac-a7bf-af6af9088f38"])
+ datacore_node = Node.model_validate(
+ workbench["89f95b67-a2a3-4215-a794-2356684deb61"]
+ )
+ rawgraph_node = Node.model_validate(
+ workbench["88119776-e869-4df2-a529-4aae9d9fa35c"]
+ )
+ simcore_node = Node.model_validate(
+ workbench["75c1707c-ec1c-49ac-a7bf-af6af9088f38"]
+ )
# must cast to the right subclass within project_nodes.py's InputTypes and OutputTypes unions
assert datacore_node.outputs
diff --git a/packages/models-library/tests/test_rabbit_messages.py b/packages/models-library/tests/test_rabbit_messages.py
index 8c95af75e679..519d54c43e84 100644
--- a/packages/models-library/tests/test_rabbit_messages.py
+++ b/packages/models-library/tests/test_rabbit_messages.py
@@ -8,7 +8,7 @@
ProgressRabbitMessageProject,
ProgressType,
)
-from pydantic import parse_raw_as
+from pydantic import TypeAdapter
faker = Faker()
@@ -19,29 +19,28 @@
pytest.param(
ProgressRabbitMessageNode(
project_id=faker.uuid4(cast_to=None),
- user_id=faker.uuid4(cast_to=None),
+ user_id=faker.pyint(min_value=1),
node_id=faker.uuid4(cast_to=None),
progress_type=ProgressType.SERVICE_OUTPUTS_PULLING,
report=ProgressReport(actual_value=0.4, total=1),
- ).json(),
+ ).model_dump_json(),
ProgressRabbitMessageNode,
id="node_progress",
),
pytest.param(
ProgressRabbitMessageProject(
project_id=faker.uuid4(cast_to=None),
- user_id=faker.uuid4(cast_to=None),
+ user_id=faker.pyint(min_value=1),
progress_type=ProgressType.PROJECT_CLOSING,
report=ProgressReport(actual_value=0.4, total=1),
- ).json(),
+ ).model_dump_json(),
ProgressRabbitMessageProject,
id="project_progress",
),
],
)
async def test_raw_message_parsing(raw_data: str, class_type: type):
- result = parse_raw_as(
- Union[ProgressRabbitMessageNode, ProgressRabbitMessageProject],
- raw_data,
- )
+ result = TypeAdapter(
+ Union[ProgressRabbitMessageNode, ProgressRabbitMessageProject]
+ ).validate_json(raw_data)
assert type(result) == class_type
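Illustrative note (not part of the patch): `parse_raw_as(T, raw)` becomes `TypeAdapter(T).validate_json(raw)`. A sketch with hypothetical message models standing in for the rabbit message union:

```python
from typing import Literal, Union

from pydantic import BaseModel, TypeAdapter


class NodeMsg(BaseModel):  # hypothetical
    kind: Literal["node"] = "node"
    progress: float


class ProjectMsg(BaseModel):  # hypothetical
    kind: Literal["project"] = "project"
    progress: float


raw = NodeMsg(progress=0.4).model_dump_json()  # v1: .json()
result = TypeAdapter(Union[NodeMsg, ProjectMsg]).validate_json(raw)
assert isinstance(result, NodeMsg)
```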
diff --git a/packages/models-library/tests/test_rest_pagination.py b/packages/models-library/tests/test_rest_pagination.py
index a9da9db2f1be..d0f5f9f7d92c 100644
--- a/packages/models-library/tests/test_rest_pagination.py
+++ b/packages/models-library/tests/test_rest_pagination.py
@@ -7,7 +7,7 @@
@pytest.mark.parametrize("cls_model", [Page[str], PageMetaInfoLimitOffset])
def test_page_response_limit_offset_models(cls_model: BaseModel):
- examples = cls_model.Config.schema_extra["examples"]
+ examples = cls_model.model_config["json_schema_extra"]["examples"]
for index, example in enumerate(examples):
print(f"{index:-^10}:\n", example)
@@ -35,14 +35,14 @@ def test_invalid_count(count: int, offset: int):
def test_data_size_does_not_fit_count():
- example = deepcopy(Page[str].Config.schema_extra["examples"][0])
+ example = deepcopy(Page[str].model_config["json_schema_extra"]["examples"][0])
example["_meta"]["count"] = len(example["data"]) - 1
with pytest.raises(ValueError):
Page[str](**example)
def test_empty_data_is_converted_to_list():
- example = deepcopy(Page[str].Config.schema_extra["examples"][0])
+ example = deepcopy(Page[str].model_config["json_schema_extra"]["examples"][0])
example["data"] = None
example["_meta"]["count"] = 0
model_instance = Page[str](**example)
diff --git a/packages/models-library/tests/test_rpc_pagination.py b/packages/models-library/tests/test_rpc_pagination.py
index 787aba4daa9e..26931b9032ec 100644
--- a/packages/models-library/tests/test_rpc_pagination.py
+++ b/packages/models-library/tests/test_rpc_pagination.py
@@ -4,10 +4,12 @@
from models_library.rpc_pagination import PageRpc
-@pytest.mark.parametrize("example", PageRpc.Config.schema_extra["examples"])
+@pytest.mark.parametrize(
+ "example", PageRpc.model_config["json_schema_extra"]["examples"]
+)
def test_create_page_rpc(example: dict[str, Any]):
- expected = PageRpc.parse_obj(example)
+ expected = PageRpc.model_validate(example)
assert PageRpc[str].create(
expected.data,
diff --git a/packages/models-library/tests/test_service_resources.py b/packages/models-library/tests/test_service_resources.py
index c119a33e898f..2bc0ccf74831 100644
--- a/packages/models-library/tests/test_service_resources.py
+++ b/packages/models-library/tests/test_service_resources.py
@@ -13,7 +13,7 @@
ServiceResourcesDict,
ServiceResourcesDictHelpers,
)
-from pydantic import parse_obj_as
+from pydantic import TypeAdapter
@pytest.mark.parametrize(
@@ -27,19 +27,19 @@
),
)
def test_compose_image(example: str) -> None:
- parse_obj_as(DockerGenericTag, example)
+ TypeAdapter(DockerGenericTag).validate_python(example)
@pytest.fixture
def resources_dict() -> ResourcesDict:
- return parse_obj_as(
- ResourcesDict, ImageResources.Config.schema_extra["example"]["resources"]
+ return TypeAdapter(ResourcesDict).validate_python(
+ ImageResources.model_config["json_schema_extra"]["example"]["resources"]
)
@pytest.fixture
def compose_image() -> DockerGenericTag:
- return parse_obj_as(DockerGenericTag, "image:latest")
+ return TypeAdapter(DockerGenericTag).validate_python("image:latest")
def _ensure_resource_value_is_an_object(data: ResourcesDict) -> None:
@@ -56,21 +56,21 @@ def test_resources_dict_parsed_as_expected(resources_dict: ResourcesDict) -> Non
def test_image_resources_parsed_as_expected() -> None:
- result: ImageResources = ImageResources.parse_obj(
- ImageResources.Config.schema_extra["example"]
+ result: ImageResources = ImageResources.model_validate(
+ ImageResources.model_config["json_schema_extra"]["example"]
)
_ensure_resource_value_is_an_object(result.resources)
assert type(result) == ImageResources
- result: ImageResources = parse_obj_as(
- ImageResources, ImageResources.Config.schema_extra["example"]
+ result: ImageResources = TypeAdapter(ImageResources).validate_python(
+ ImageResources.model_config["json_schema_extra"]["example"]
)
assert type(result) == ImageResources
_ensure_resource_value_is_an_object(result.resources)
@pytest.mark.parametrize(
- "example", ServiceResourcesDictHelpers.Config.schema_extra["examples"]
+ "example", ServiceResourcesDictHelpers.model_config["json_schema_extra"]["examples"]
)
def test_service_resource_parsed_as_expected(
example: dict[DockerGenericTag, Any], compose_image: DockerGenericTag
@@ -84,27 +84,27 @@ def _assert_service_resources_dict(
for image_resources in service_resources_dict.values():
_ensure_resource_value_is_an_object(image_resources.resources)
- service_resources_dict: ServiceResourcesDict = parse_obj_as(
- ServiceResourcesDict, example
- )
+ service_resources_dict: ServiceResourcesDict = TypeAdapter(
+ ServiceResourcesDict
+ ).validate_python(example)
_assert_service_resources_dict(service_resources_dict)
for image_resources in example.values():
service_resources_dict_from_single_service = (
ServiceResourcesDictHelpers.create_from_single_service(
image=compose_image,
- resources=ImageResources.parse_obj(image_resources).resources,
+ resources=ImageResources.model_validate(image_resources).resources,
)
)
_assert_service_resources_dict(service_resources_dict_from_single_service)
@pytest.mark.parametrize(
- "example", ServiceResourcesDictHelpers.Config.schema_extra["examples"]
+ "example", ServiceResourcesDictHelpers.model_config["json_schema_extra"]["examples"]
)
def test_create_jsonable_dict(example: dict[DockerGenericTag, Any]) -> None:
- service_resources_dict: ServiceResourcesDict = parse_obj_as(
- ServiceResourcesDict, example
- )
+ service_resources_dict: ServiceResourcesDict = TypeAdapter(
+ ServiceResourcesDict
+ ).validate_python(example)
result = ServiceResourcesDictHelpers.create_jsonable(service_resources_dict)
assert example == result
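
parse_obj_as over constrained aliases such as DockerGenericTag becomes TypeAdapter(...).validate_python(...). A sketch assuming the alias is an Annotated constrained str; the pattern below is made up and is not the real DockerGenericTag definition:

from typing import Annotated

from pydantic import StringConstraints, TypeAdapter

# hypothetical constrained alias, similar in spirit to DockerGenericTag
_ImageTag = Annotated[str, StringConstraints(pattern=r"^[\w./:-]+$")]

# v1: parse_obj_as(_ImageTag, "image:latest")
tag = TypeAdapter(_ImageTag).validate_python("image:latest")
assert tag == "image:latest"
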
diff --git a/packages/models-library/tests/test_service_settings_labels.py b/packages/models-library/tests/test_service_settings_labels.py
index a564c1be88f7..f9c096804cc0 100644
--- a/packages/models-library/tests/test_service_settings_labels.py
+++ b/packages/models-library/tests/test_service_settings_labels.py
@@ -31,7 +31,7 @@
)
from models_library.services_resources import DEFAULT_SINGLE_SERVICE_NAME
from models_library.utils.string_substitution import TextTemplate
-from pydantic import BaseModel, ValidationError, parse_obj_as, parse_raw_as
+from pydantic import BaseModel, TypeAdapter, ValidationError, parse_obj_as
from pydantic.json import pydantic_encoder
@@ -43,17 +43,17 @@ class _Parametrization(NamedTuple):
SIMCORE_SERVICE_EXAMPLES = {
"legacy": _Parametrization(
- example=SimcoreServiceLabels.Config.schema_extra["examples"][0],
+ example=SimcoreServiceLabels.model_config["json_schema_extra"]["examples"][0],
items=1,
uses_dynamic_sidecar=False,
),
"dynamic-service": _Parametrization(
- example=SimcoreServiceLabels.Config.schema_extra["examples"][1],
+ example=SimcoreServiceLabels.model_config["json_schema_extra"]["examples"][1],
items=5,
uses_dynamic_sidecar=True,
),
"dynamic-service-with-compose-spec": _Parametrization(
- example=SimcoreServiceLabels.Config.schema_extra["examples"][2],
+ example=SimcoreServiceLabels.model_config["json_schema_extra"]["examples"][2],
items=6,
uses_dynamic_sidecar=True,
),
@@ -66,7 +66,7 @@ class _Parametrization(NamedTuple):
ids=list(SIMCORE_SERVICE_EXAMPLES.keys()),
)
def test_simcore_service_labels(example: dict, items: int, uses_dynamic_sidecar: bool):
- simcore_service_labels = SimcoreServiceLabels.parse_obj(example)
+ simcore_service_labels = SimcoreServiceLabels.model_validate(example)
assert simcore_service_labels
assert len(simcore_service_labels.dict(exclude_unset=True)) == items
@@ -75,11 +75,11 @@ def test_simcore_service_labels(example: dict, items: int, uses_dynamic_sidecar:
def test_service_settings():
simcore_settings_settings_label = SimcoreServiceSettingsLabel.parse_obj(
- SimcoreServiceSettingLabelEntry.Config.schema_extra["examples"]
+ SimcoreServiceSettingLabelEntry.model_config["json_schema_extra"]["examples"]
)
assert simcore_settings_settings_label
assert len(simcore_settings_settings_label) == len(
- SimcoreServiceSettingLabelEntry.Config.schema_extra["examples"]
+ SimcoreServiceSettingLabelEntry.model_config["json_schema_extra"]["examples"]
)
assert simcore_settings_settings_label[0]
@@ -95,7 +95,7 @@ def test_correctly_detect_dynamic_sidecar_boot(
):
for name, example in model_cls_examples.items():
print(name, ":", pformat(example))
- model_instance = parse_obj_as(model_cls, example)
+ model_instance = TypeAdapter(model_cls).validate_python(example)
assert model_instance.callbacks_mapping is not None
assert model_instance.needs_dynamic_sidecar == (
"simcore.service.paths-mapping" in example
@@ -104,7 +104,7 @@ def test_correctly_detect_dynamic_sidecar_boot(
def test_raises_error_if_http_entrypoint_is_missing():
simcore_service_labels: dict[str, Any] = deepcopy(
- SimcoreServiceLabels.Config.schema_extra["examples"][2]
+ SimcoreServiceLabels.model_config["json_schema_extra"]["examples"][2]
)
del simcore_service_labels["simcore.service.container-http-entrypoint"]
@@ -113,22 +113,27 @@ def test_raises_error_if_http_entrypoint_is_missing():
def test_path_mappings_none_state_paths():
- sample_data = deepcopy(PathMappingsLabel.Config.schema_extra["examples"][0])
+ sample_data = deepcopy(
+ PathMappingsLabel.model_config["json_schema_extra"]["examples"][0]
+ )
sample_data["state_paths"] = None
with pytest.raises(ValidationError):
PathMappingsLabel(**sample_data)
def test_path_mappings_json_encoding():
- for example in PathMappingsLabel.Config.schema_extra["examples"]:
- path_mappings = PathMappingsLabel.parse_obj(example)
+ for example in PathMappingsLabel.model_config["json_schema_extra"]["examples"]:
+ path_mappings = PathMappingsLabel.model_validate(example)
print(path_mappings)
- assert PathMappingsLabel.parse_raw(path_mappings.json()) == path_mappings
+ assert (
+ PathMappingsLabel.model_validate_json(path_mappings.model_dump_json())
+ == path_mappings
+ )
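
For reference, the v1 round trip Model.parse_raw(instance.json()) maps onto Model.model_validate_json(instance.model_dump_json()) in v2. A self-contained sketch with an illustrative model:

from pydantic import BaseModel


class _PathMapping(BaseModel):  # illustrative stand-in
    inputs_path: str
    outputs_path: str


mapping = _PathMapping(inputs_path="/in", outputs_path="/out")
# v1: _PathMapping.parse_raw(mapping.json())
# v2: _PathMapping.model_validate_json(mapping.model_dump_json())
assert _PathMapping.model_validate_json(mapping.model_dump_json()) == mapping
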
def test_simcore_services_labels_compose_spec_null_container_http_entry_provided():
sample_data: dict[str, Any] = deepcopy(
- SimcoreServiceLabels.Config.schema_extra["examples"][2]
+ SimcoreServiceLabels.model_config["json_schema_extra"]["examples"][2]
)
assert sample_data["simcore.service.container-http-entrypoint"]
@@ -140,7 +145,7 @@ def test_simcore_services_labels_compose_spec_null_container_http_entry_provided
def test_raises_error_wrong_restart_policy():
simcore_service_labels: dict[str, Any] = deepcopy(
- SimcoreServiceLabels.Config.schema_extra["examples"][2]
+ SimcoreServiceLabels.model_config["json_schema_extra"]["examples"][2]
)
simcore_service_labels["simcore.service.restart-policy"] = "__not_a_valid_policy__"
@@ -150,7 +155,7 @@ def test_raises_error_wrong_restart_policy():
def test_path_mappings_label_unsupported_size_constraints():
with pytest.raises(ValidationError) as exec_into:
- PathMappingsLabel.parse_obj(
+ PathMappingsLabel.model_validate(
{
"outputs_path": "/ok_input_path",
"inputs_path": "/ok_output_path",
@@ -163,7 +168,7 @@ def test_path_mappings_label_unsupported_size_constraints():
def test_path_mappings_label_defining_constraing_on_missing_path():
with pytest.raises(ValidationError) as exec_into:
- PathMappingsLabel.parse_obj(
+ PathMappingsLabel.model_validate(
{
"outputs_path": "/ok_input_path",
"inputs_path": "/ok_output_path",
@@ -177,10 +182,10 @@ def test_path_mappings_label_defining_constraing_on_missing_path():
)
-PORT_1: Final[PortInt] = parse_obj_as(PortInt, 1)
-PORT_3: Final[PortInt] = parse_obj_as(PortInt, 3)
-PORT_20: Final[PortInt] = parse_obj_as(PortInt, 20)
-PORT_99: Final[PortInt] = parse_obj_as(PortInt, 99)
+PORT_1: Final[PortInt] = TypeAdapter(PortInt).validate_python(1)
+PORT_3: Final[PortInt] = TypeAdapter(PortInt).validate_python(3)
+PORT_20: Final[PortInt] = TypeAdapter(PortInt).validate_python(20)
+PORT_99: Final[PortInt] = TypeAdapter(PortInt).validate_python(99)
def test_port_range():
@@ -580,7 +585,7 @@ def test_resolving_some_service_labels_at_load_time(
)
assert template.is_valid()
resolved_label: str = template.safe_substitute(vendor_environments)
- to_restore = parse_raw_as(pydantic_model, resolved_label)
+ to_restore = TypeAdapter(pydantic_model).validate_json(resolved_label)
setattr(service_meta, attribute_name, to_restore)
print(json.dumps(service_labels, indent=1))
@@ -590,8 +595,8 @@ def test_resolving_some_service_labels_at_load_time(
# NOTE: vendor values are in the database and therefore are available at this point
labels = SimcoreServiceLabels.parse_obj(service_labels)
- print("After", labels.json(indent=1))
- formatted_json = service_meta.json(indent=1)
+ print("After", labels.model_dump_json(indent=1))
+ formatted_json = service_meta.model_dump_json(indent=1)
print("After", formatted_json)
for entry in vendor_environments:
print(entry)
@@ -601,7 +606,7 @@ def test_resolving_some_service_labels_at_load_time(
def test_user_preferences_path_is_part_of_exiting_volume():
labels_data = {
"simcore.service.paths-mapping": json.dumps(
- PathMappingsLabel.Config.schema_extra["examples"][0]
+ PathMappingsLabel.model_config["json_schema_extra"]["examples"][0]
),
"simcore.service.user-preferences-path": json.dumps(
"/tmp/outputs" # noqa: S108
diff --git a/packages/models-library/tests/test_services.py b/packages/models-library/tests/test_services.py
index 54906743db10..c7b7562eaa63 100644
--- a/packages/models-library/tests/test_services.py
+++ b/packages/models-library/tests/test_services.py
@@ -182,7 +182,7 @@ def _find_pattern_entry(obj: dict[str, Any], key: str) -> Any:
def test_boot_option_wrong_default() -> None:
- for example in [deepcopy(x) for x in BootOption.Config.schema_extra["examples"]]:
+ for example in [deepcopy(x) for x in BootOption.model_config["json_schema_extra"]["examples"]]:
with pytest.raises(ValueError):
example["default"] = "__undefined__"
assert BootOption(**example)
@@ -201,11 +201,12 @@ def test_service_docker_data_labels_convesion():
# we want labels to look like io.simcore.a_label_property
convension_breaking_fields: set[tuple[str, str]] = set()
- fiedls_with_aliases: list[tuple[str, str]] = [
- (x.name, x.alias) for x in ServiceMetaDataPublished.__fields__.values()
+ fields_with_aliases: list[tuple[str, str]] = [
+ (name, info.alias) for name, info in ServiceMetaDataPublished.model_fields.items()
+ if info.alias is not None
]
- for name, alias in fiedls_with_aliases:
+ for name, alias in fields_with_aliases:
if alias in FIELD_NAME_EXCEPTIONS:
continue
# check dashes and uppercase
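
In v2, __fields__ (ModelField objects with .name/.alias) is replaced by model_fields, a dict of FieldInfo keyed by field name where alias stays None unless set explicitly, hence the filter added above. A sketch with an illustrative model:

from pydantic import BaseModel, Field


class _Meta(BaseModel):  # illustrative stand-in
    key: str = Field(alias="io.simcore.key")
    version: str


aliases = {name: info.alias for name, info in _Meta.model_fields.items()}
assert aliases == {"key": "io.simcore.key", "version": None}
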
diff --git a/packages/models-library/tests/test_utils_common_validators.py b/packages/models-library/tests/test_utils_common_validators.py
index d7fe367ab5dd..d4c7cb5409f5 100644
--- a/packages/models-library/tests/test_utils_common_validators.py
+++ b/packages/models-library/tests/test_utils_common_validators.py
@@ -7,7 +7,7 @@
none_to_empty_str_pre_validator,
null_or_none_str_to_none_validator,
)
-from pydantic import BaseModel, ValidationError, validator
+from pydantic import BaseModel, ValidationError, field_validator
def test_enums_pre_validator():
@@ -20,13 +20,14 @@ class Model(BaseModel):
class ModelWithPreValidator(BaseModel):
color: Enum1
- _from_equivalent_enums = validator("color", allow_reuse=True, pre=True)(
+ _from_equivalent_enums = field_validator("color", mode="before")(
create_enums_pre_validator(Enum1)
)
# with Enum1
model = Model(color=Enum1.RED)
- assert ModelWithPreValidator(color=Enum1.RED) == model
+ # See: https://docs.pydantic.dev/latest/migration/#changes-to-pydanticbasemodel
+ assert ModelWithPreValidator(color=Enum1.RED).model_dump() == model.model_dump()
# with Enum2
class Enum2(Enum):
@@ -35,14 +36,15 @@ class Enum2(Enum):
with pytest.raises(ValidationError):
Model(color=Enum2.RED)
- assert ModelWithPreValidator(color=Enum2.RED) == model
+ # See: https://docs.pydantic.dev/latest/migration/#changes-to-pydanticbasemodel
+ assert ModelWithPreValidator(color=Enum2.RED).model_dump() == model.model_dump()
def test_empty_str_to_none_pre_validator():
class Model(BaseModel):
nullable_message: str | None
- _empty_is_none = validator("nullable_message", allow_reuse=True, pre=True)(
+ _empty_is_none = field_validator("nullable_message", mode="before")(
empty_str_to_none_pre_validator
)
@@ -54,7 +56,7 @@ def test_none_to_empty_str_pre_validator():
class Model(BaseModel):
message: str
- _none_is_empty = validator("message", allow_reuse=True, pre=True)(
+ _none_is_empty = field_validator("message", mode="before")(
none_to_empty_str_pre_validator
)
@@ -66,9 +68,9 @@ def test_null_or_none_str_to_none_validator():
class Model(BaseModel):
message: str | None
- _null_or_none_str_to_none_validator = validator(
- "message", allow_reuse=True, pre=True
- )(null_or_none_str_to_none_validator)
+ _null_or_none_str_to_none_validator = field_validator("message", mode="before")(
+ null_or_none_str_to_none_validator
+ )
model = Model.parse_obj({"message": "none"})
assert model == Model.parse_obj({"message": None})
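
The validator(..., allow_reuse=True, pre=True) idiom becomes field_validator(..., mode="before"); allow_reuse is gone in v2, and the model_dump() comparisons above follow from v2 comparing models by class as well as by field values. A self-contained sketch with a stand-in pre-validator (the repo's own helpers have the same single-argument shape):

from pydantic import BaseModel, field_validator


def _empty_str_to_none(value):
    # stand-in for empty_str_to_none_pre_validator
    if isinstance(value, str) and value.strip() == "":
        return None
    return value


class _Model(BaseModel):
    nullable_message: str | None

    # v1: validator("nullable_message", allow_reuse=True, pre=True)(...)
    # v2: mode="before" replaces pre=True
    _empty_is_none = field_validator("nullable_message", mode="before")(
        _empty_str_to_none
    )


assert _Model(nullable_message="").nullable_message is None
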
diff --git a/packages/models-library/tests/test_utils_nodes.py b/packages/models-library/tests/test_utils_nodes.py
index 47465ce236d3..87831ac88078 100644
--- a/packages/models-library/tests/test_utils_nodes.py
+++ b/packages/models-library/tests/test_utils_nodes.py
@@ -14,7 +14,6 @@
SimcoreS3FileID,
)
from models_library.utils.nodes import compute_node_hash
-from pydantic import AnyUrl, parse_obj_as
@pytest.fixture()
@@ -45,9 +44,7 @@ def node_id() -> NodeID:
"input_bool": True,
"input_string": "string",
"input_downloadlink": DownloadLink(
- downloadLink=parse_obj_as(
- AnyUrl, "http://httpbin.org/image/jpeg"
- )
+ downloadLink="http://httpbin.org/image/jpeg"
),
"input_simcorelink": SimCoreFileLink(
store=0,
diff --git a/packages/models-library/tests/test_utils_pydantic_tools_extension.py b/packages/models-library/tests/test_utils_pydantic_tools_extension.py
index 95a50099c028..34cbb528dbb8 100644
--- a/packages/models-library/tests/test_utils_pydantic_tools_extension.py
+++ b/packages/models-library/tests/test_utils_pydantic_tools_extension.py
@@ -14,15 +14,20 @@ class MyModel(BaseModel):
def test_schema():
- assert MyModel.schema() == {
+ assert MyModel.model_json_schema() == {
"title": "MyModel",
"type": "object",
"properties": {
"a": {"title": "A", "type": "integer"},
- "b": {"title": "B", "type": "integer"},
+ "b": {"anyOf": [{"type": "integer"}, {"type": "null"}], "title": "B"},
"c": {"title": "C", "default": 42, "type": "integer"},
- "d": {"title": "D", "type": "integer"},
+ "d": {
+ "anyOf": [{"type": "integer"}, {"type": "null"}],
+ "default": None,
+ "title": "D",
+ },
"e": {
+ "default": None,
"title": "E",
"type": "integer",
"description": "optional non-nullable",
diff --git a/packages/models-library/tests/test_utils_service_io.py b/packages/models-library/tests/test_utils_service_io.py
index 2bf58a288699..51e4324e2be2 100644
--- a/packages/models-library/tests/test_utils_service_io.py
+++ b/packages/models-library/tests/test_utils_service_io.py
@@ -20,10 +20,10 @@
from pydantic import parse_obj_as
example_inputs_labels = [
- e for e in ServiceInput.Config.schema_extra["examples"] if e["label"]
+ e for e in ServiceInput.model_config["json_schema_extra"]["examples"] if e["label"]
]
example_outputs_labels = [
- e for e in ServiceOutput.Config.schema_extra["examples"] if e["label"]
+ e for e in ServiceOutput.model_config["json_schema_extra"]["examples"] if e["label"]
]
@@ -31,16 +31,16 @@
def service_port(request: pytest.FixtureRequest) -> ServiceInput | ServiceOutput:
try:
index = example_inputs_labels.index(request.param)
- example = ServiceInput.Config.schema_extra["examples"][index]
+ example = ServiceInput.model_config["json_schema_extra"]["examples"][index]
return ServiceInput.parse_obj(example)
except ValueError:
index = example_outputs_labels.index(request.param)
- example = ServiceOutput.Config.schema_extra["examples"][index]
+ example = ServiceOutput.model_config["json_schema_extra"]["examples"][index]
return ServiceOutput.parse_obj(example)
def test_get_schema_from_port(service_port: ServiceInput | ServiceOutput):
- print(service_port.json(indent=2))
+ print(service_port.model_dump_json(indent=2))
# get
schema = get_service_io_json_schema(service_port)
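
A note on the expected-schema changes in test_utils_pydantic_tools_extension.py above: pydantic v2 renders an optional field such as int | None as an explicit anyOf with a null branch (and emits the None default where one is set), instead of the bare integer schema v1 produced. A minimal reproduction with an illustrative model:

from pydantic import BaseModel


class _Example(BaseModel):  # illustrative stand-in
    b: int | None


schema = _Example.model_json_schema()
assert schema["properties"]["b"] == {
    "anyOf": [{"type": "integer"}, {"type": "null"}],
    "title": "B",
}
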
diff --git a/packages/pytest-simcore/src/pytest_simcore/pydantic_models.py b/packages/pytest-simcore/src/pytest_simcore/pydantic_models.py
index 7cfbf13df11a..297e9a9ab13f 100644
--- a/packages/pytest-simcore/src/pytest_simcore/pydantic_models.py
+++ b/packages/pytest-simcore/src/pytest_simcore/pydantic_models.py
@@ -82,7 +82,7 @@ def _is_model_cls(obj) -> bool:
for model_name, model_cls in inspect.getmembers(module, _is_model_cls):
assert model_name # nosec
if (
- (config_cls := model_cls.Config)
+ (config_cls := model_cls.model_config)
and inspect.isclass(config_cls)
and is_strict_inner(model_cls, config_cls)
and (schema_extra := getattr(config_cls, "schema_extra", {}))
@@ -121,7 +121,9 @@ def model_cls_examples(model_cls: type[BaseModel]) -> dict[str, dict[str, Any]]:
)
# checks exampleS setup in schema_extra
- examples_list = copy.deepcopy(model_cls.Config.schema_extra.get("examples", []))
+ examples_list = copy.deepcopy(
+ model_cls.model_config["json_schema_extra"].get("examples", [])
+ )
assert isinstance(examples_list, list), (
"OpenAPI and json-schema differ regarding the format for exampleS."
"The former is a dict and the latter an array. "
@@ -131,7 +133,7 @@ def model_cls_examples(model_cls: type[BaseModel]) -> dict[str, dict[str, Any]]:
)
# check example in schema_extra
- example = copy.deepcopy(model_cls.Config.schema_extra.get("example"))
+ example = copy.deepcopy(model_cls.model_config["json_schema_extra"].get("example"))
# collect all examples and creates fixture -> {example-name: example, ...}
examples = {
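
Since model_config is a plain dict in v2 (there is no inner Config class left to introspect), json_schema_extra is safest read with .get() for models that may not define it. A short sketch:

from pydantic import BaseModel, ConfigDict


class _WithExamples(BaseModel):  # illustrative stand-in
    x: int

    model_config = ConfigDict(json_schema_extra={"examples": [{"x": 1}]})


extra = _WithExamples.model_config.get("json_schema_extra") or {}
assert extra.get("examples", []) == [{"x": 1}]
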
diff --git a/requirements/constraints.txt b/requirements/constraints.txt
index 3316f4276ed2..3e40b2694d4f 100644
--- a/requirements/constraints.txt
+++ b/requirements/constraints.txt
@@ -32,11 +32,6 @@ urllib3>=1.26.5 # https://github.com/advisories/GH
# Breaking changes -----------------------------------------------------------------------------------------
#
-
-# SEE https://github.com/ITISFoundation/osparc-simcore/issues/4481
-fastapi<0.100.0
-pydantic<2.0
-
# with new released version 1.0.0 (https://github.com/aio-libs/aiozipkin/releases).
# TODO: includes async features https://docs.sqlalchemy.org/en/14/changelog/migration_20.html
sqlalchemy<2.0
diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/modules/s3.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/modules/s3.py
index 889b8cfcd1cb..285d08ef58e6 100644
--- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/modules/s3.py
+++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/modules/s3.py
@@ -4,6 +4,7 @@
from aws_library.s3 import S3NotConnectedError, SimcoreS3API
from fastapi import FastAPI
from models_library.api_schemas_storage import S3BucketName
+from pydantic import TypeAdapter
from settings_library.s3 import S3Settings
from tenacity import (
AsyncRetrying,
@@ -36,7 +37,9 @@ async def on_startup() -> None:
):
with attempt:
connected = await client.http_check_bucket_connected(
- bucket=S3BucketName(settings.S3_BUCKET_NAME)
+ bucket=TypeAdapter(S3BucketName).validate_python(
+ settings.S3_BUCKET_NAME
+ )
)
if not connected:
raise S3NotConnectedError # pragma: no cover
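
The bucket-name validation above runs inside a retry loop; TypeAdapter builds its core schema when constructed, so a module-level adapter keeps repeated attempts cheap. A sketch with a hypothetical stand-in for S3BucketName (the real alias lives in models_library.api_schemas_storage):

from typing import Annotated, Final

from pydantic import StringConstraints, TypeAdapter

# hypothetical constraints, for illustration only
_BucketName = Annotated[str, StringConstraints(min_length=3, max_length=63)]

_BUCKET_NAME_ADAPTER: Final = TypeAdapter(_BucketName)  # built once at import time


def to_bucket_name(raw: str) -> str:
    return _BUCKET_NAME_ADAPTER.validate_python(raw)
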
diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/resource_tracker_service_runs.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/resource_tracker_service_runs.py
index 75e17e28c96d..8977d6fa5a08 100644
--- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/resource_tracker_service_runs.py
+++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/resource_tracker_service_runs.py
@@ -18,7 +18,7 @@
from models_library.rest_ordering import OrderBy
from models_library.users import UserID
from models_library.wallets import WalletID
-from pydantic import AnyUrl, PositiveInt
+from pydantic import AnyUrl, PositiveInt, TypeAdapter
from servicelib.rabbitmq.rpc_interfaces.resource_usage_tracker.errors import (
CustomResourceUsageTrackerError,
)
@@ -158,7 +158,7 @@ async def export_service_runs(
started_until = filters.started_at.until if filters else None
# Create S3 key name
- s3_bucket_name = S3BucketName(bucket_name)
+ s3_bucket_name = TypeAdapter(S3BucketName).validate_python(bucket_name)
# NOTE: su stands for "service usage"
file_name = f"su_{shortuuid.uuid()}.csv"
s3_object_key = f"resource-usage-tracker-service-runs/{datetime.now(tz=timezone.utc).date()}/{file_name}"
diff --git a/services/storage/src/simcore_service_storage/handlers_health.py b/services/storage/src/simcore_service_storage/handlers_health.py
index 4ae743afef95..eb94feb9bb41 100644
--- a/services/storage/src/simcore_service_storage/handlers_health.py
+++ b/services/storage/src/simcore_service_storage/handlers_health.py
@@ -11,6 +11,7 @@
from models_library.api_schemas_storage import HealthCheck, S3BucketName
from models_library.app_diagnostics import AppStatusCheck
from models_library.utils.json_serialization import json_dumps
+from pydantic import TypeAdapter
from servicelib.rest_constants import RESPONSE_MODEL_POLICY
from ._meta import API_VERSION, API_VTAG, PROJECT_NAME, VERSION
@@ -53,7 +54,9 @@ async def get_status(request: web.Request) -> web.Response:
s3_state = (
"connected"
if await get_s3_client(request.app).bucket_exists(
- bucket=S3BucketName(app_settings.STORAGE_S3.S3_BUCKET_NAME)
+ bucket=TypeAdapter(S3BucketName).validate_python(
+ app_settings.STORAGE_S3.S3_BUCKET_NAME
+ )
)
else "no access to S3 bucket"
)
diff --git a/services/storage/tests/unit/test_dsm_soft_links.py b/services/storage/tests/unit/test_dsm_soft_links.py
index d2d1c6acd65e..ed0e01ea7f0c 100644
--- a/services/storage/tests/unit/test_dsm_soft_links.py
+++ b/services/storage/tests/unit/test_dsm_soft_links.py
@@ -13,7 +13,7 @@
from models_library.projects_nodes_io import SimcoreS3FileID
from models_library.users import UserID
from models_library.utils.fastapi_encoders import jsonable_encoder
-from pydantic import ByteSize
+from pydantic import ByteSize, TypeAdapter
from simcore_postgres_database.storage_models import file_meta_data
from simcore_service_storage.models import FileMetaData, FileMetaDataAtDB
from simcore_service_storage.simcore_s3_dsm import SimcoreS3DataManager
@@ -34,7 +34,7 @@ async def output_file(
file = FileMetaData.from_simcore_node(
user_id=user_id,
file_id=SimcoreS3FileID(f"{project_id}/{node_id}/filename.txt"),
- bucket=S3BucketName("master-simcore"),
+ bucket=TypeAdapter(S3BucketName).validate_python("master-simcore"),
location_id=SimcoreS3DataManager.get_location_id(),
location_name=SimcoreS3DataManager.get_location_name(),
sha256_checksum=faker.sha256(),
diff --git a/services/storage/tests/unit/test_models.py b/services/storage/tests/unit/test_models.py
index 82bd900b772b..0dbab6821d24 100644
--- a/services/storage/tests/unit/test_models.py
+++ b/services/storage/tests/unit/test_models.py
@@ -4,7 +4,7 @@
from models_library.api_schemas_storage import S3BucketName
from models_library.projects import ProjectID
from models_library.projects_nodes_io import NodeID, SimcoreS3FileID, StorageFileID
-from pydantic import ValidationError, parse_obj_as
+from pydantic import TypeAdapter, ValidationError
from simcore_service_storage.models import FileMetaData
from simcore_service_storage.simcore_s3_dsm import SimcoreS3DataManager
@@ -15,7 +15,7 @@
)
def test_file_id_raises_error(file_id: str):
with pytest.raises(ValidationError):
- parse_obj_as(StorageFileID, file_id)
+ TypeAdapter(StorageFileID).validate_python(file_id)
@pytest.mark.parametrize(
@@ -38,17 +38,17 @@ def test_file_id_raises_error(file_id: str):
],
)
def test_file_id(file_id: str):
- parsed_file_id = parse_obj_as(StorageFileID, file_id)
+ parsed_file_id = TypeAdapter(StorageFileID).validate_python(file_id)
assert parsed_file_id
assert parsed_file_id == file_id
def test_fmd_build():
- file_id = parse_obj_as(SimcoreS3FileID, f"api/{uuid.uuid4()}/xx.dat")
+ file_id = TypeAdapter(SimcoreS3FileID).validate_python(f"api/{uuid.uuid4()}/xx.dat")
fmd = FileMetaData.from_simcore_node(
user_id=12,
file_id=file_id,
- bucket=S3BucketName("test-bucket"),
+ bucket=TypeAdapter(S3BucketName).validate_python("test-bucket"),
location_id=SimcoreS3DataManager.get_location_id(),
location_name=SimcoreS3DataManager.get_location_name(),
sha256_checksum=None,
@@ -64,11 +64,13 @@ def test_fmd_build():
assert fmd.location_id == SimcoreS3DataManager.get_location_id()
assert fmd.bucket_name == "test-bucket"
- file_id = parse_obj_as(SimcoreS3FileID, f"{uuid.uuid4()}/{uuid.uuid4()}/xx.dat")
+ file_id = TypeAdapter(SimcoreS3FileID).validate_python(
+ f"{uuid.uuid4()}/{uuid.uuid4()}/xx.dat"
+ )
fmd = FileMetaData.from_simcore_node(
user_id=12,
file_id=file_id,
- bucket=S3BucketName("test-bucket"),
+ bucket=TypeAdapter(S3BucketName).validate_python("test-bucket"),
location_id=SimcoreS3DataManager.get_location_id(),
location_name=SimcoreS3DataManager.get_location_name(),
sha256_checksum=None,
diff --git a/services/storage/tests/unit/test_utils.py b/services/storage/tests/unit/test_utils.py
index 1b71a1c29d58..90dca22d42dd 100644
--- a/services/storage/tests/unit/test_utils.py
+++ b/services/storage/tests/unit/test_utils.py
@@ -16,7 +16,7 @@
from faker import Faker
from models_library.projects import ProjectID
from models_library.projects_nodes_io import NodeID, SimcoreS3FileID
-from pydantic import ByteSize, HttpUrl, parse_obj_as
+from pydantic import ByteSize, HttpUrl, TypeAdapter, parse_obj_as
from simcore_service_storage.constants import S3_UNDEFINED_OR_EXTERNAL_MULTIPART_ID
from simcore_service_storage.models import ETag, FileMetaData, S3BucketName, UploadID
from simcore_service_storage.simcore_s3_dsm import SimcoreS3DataManager
@@ -79,7 +79,7 @@ def test_file_entry_valid(
fmd = FileMetaData.from_simcore_node(
user_id=faker.pyint(min_value=1),
file_id=file_id,
- bucket=S3BucketName("pytest-bucket"),
+ bucket=TypeAdapter(S3BucketName).validate_python("pytest-bucket"),
location_id=SimcoreS3DataManager.get_location_id(),
location_name=SimcoreS3DataManager.get_location_name(),
sha256_checksum=None,
diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py b/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py
index 8613fbc83193..4f6b3c3cf1fb 100644
--- a/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py
+++ b/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py
@@ -27,7 +27,7 @@
)
from models_library.api_schemas_webserver.projects_nodes import NodeGet, NodeGetIdle
from models_library.projects import ProjectID
-from models_library.projects_access import Owner, PositiveIntWithExclusiveMinimumRemoved
+from models_library.projects_access import Owner
from models_library.projects_state import (
ProjectLocked,
ProjectRunningState,
@@ -41,6 +41,7 @@
ServiceResourcesDictHelpers,
)
from models_library.utils.fastapi_encoders import jsonable_encoder
+from pydantic import PositiveInt
from pytest_simcore.helpers.assert_checks import assert_status
from pytest_simcore.helpers.typing_env import EnvVarsDict
from pytest_simcore.helpers.webserver_login import UserInfoDict, log_client_in
@@ -1308,7 +1309,7 @@ async def test_open_shared_project_2_users_locked(
expected_project_state_client_2.locked.value = True
expected_project_state_client_2.locked.status = ProjectStatus.OPENED
owner2 = Owner(
- user_id=PositiveIntWithExclusiveMinimumRemoved(user_2["id"]),
+ user_id=PositiveInt(user_2["id"]),
first_name=user_2.get("first_name", None),
last_name=user_2.get("last_name", None),
)
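
PositiveInt is an annotation-only type in v2: calling it like a constructor only coerces to int and does not apply the Gt(0) constraint, so validation still happens through the Owner model field (or a TypeAdapter). A minimal sketch:

import pytest
from pydantic import BaseModel, PositiveInt, TypeAdapter, ValidationError


class _Owner(BaseModel):  # illustrative stand-in
    user_id: PositiveInt


assert _Owner(user_id=2).user_id == 2
assert PositiveInt(0) == 0  # direct call performs no validation

with pytest.raises(ValidationError):
    TypeAdapter(PositiveInt).validate_python(0)
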