Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

🐛 Clean Pydantic model_dump warnings #7358

Open
wants to merge 19 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
20 changes: 13 additions & 7 deletions packages/models-library/src/models_library/docker.py
Original file line number Diff line number Diff line change
Expand Up @@ -63,9 +63,9 @@ def from_key(cls, key: str) -> "DockerLabelKey":
_UNDEFINED_LABEL_VALUE_INT: Final[str] = "0"


DOCKER_TASK_EC2_INSTANCE_TYPE_PLACEMENT_CONSTRAINT_KEY: Final[
DockerLabelKey
] = TypeAdapter(DockerLabelKey).validate_python("ec2-instance-type")
DOCKER_TASK_EC2_INSTANCE_TYPE_PLACEMENT_CONSTRAINT_KEY: Final[DockerLabelKey] = (
TypeAdapter(DockerLabelKey).validate_python("ec2-instance-type")
)


def to_simcore_runtime_docker_label_key(key: str) -> DockerLabelKey:
Expand Down Expand Up @@ -122,18 +122,24 @@ def _backwards_compatibility(cls, values: dict[str, Any]) -> dict[str, Any]:

mapped_values.setdefault(
f"{_SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX}memory-limit",
_UNDEFINED_LABEL_VALUE_INT,
values.get("memory_limit", _UNDEFINED_LABEL_VALUE_INT),
)

def _convert_nano_cpus_to_cpus(nano_cpu: str) -> str:
with contextlib.suppress(ValidationError):
return f"{TypeAdapter(float).validate_python(nano_cpu) / (1.0*10**9):.2f}"
return f"{TypeAdapter(float).validate_python(nano_cpu) / (1.0 * 10**9):.2f}"
return _UNDEFINED_LABEL_VALUE_INT

mapped_values.setdefault(
f"{_SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX}cpu-limit",
_convert_nano_cpus_to_cpus(
values.get("nano_cpus_limit", _UNDEFINED_LABEL_VALUE_INT)
values.get(
"cpu_limit",
_convert_nano_cpus_to_cpus(
values.get(
"nano_cpus_limit",
_UNDEFINED_LABEL_VALUE_INT,
)
),
),
)
return mapped_values
Expand Down
21 changes: 18 additions & 3 deletions packages/models-library/tests/test_docker.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@


from typing import Any
from uuid import UUID

import pytest
from faker import Faker
Expand All @@ -13,7 +14,7 @@
DockerLabelKey,
StandardSimcoreDockerLabels,
)
from pydantic import TypeAdapter, ValidationError
from pydantic import ByteSize, TypeAdapter, ValidationError

_faker = Faker()

Expand Down Expand Up @@ -83,11 +84,11 @@ def test_docker_label_key(label_key: str, valid: bool):
True,
),
(
f"registry:5000/si.m--c_ore/services/1234/jupyter-smash:{'A'*128}",
f"registry:5000/si.m--c_ore/services/1234/jupyter-smash:{'A' * 128}",
True,
),
(
f"registry:5000/si.m--c_ore/services/1234/jupyter-smash:{'A'*129}",
f"registry:5000/si.m--c_ore/services/1234/jupyter-smash:{'A' * 129}",
False,
),
),
Expand Down Expand Up @@ -122,3 +123,17 @@ def test_simcore_service_docker_label_keys(obj_data: dict[str, Any]):
).validate_python(exported_dict)
assert re_imported_docker_label_keys
assert simcore_service_docker_label_keys == re_imported_docker_label_keys


def test_simcore_service_docker_label_keys_construction():
    """Check that StandardSimcoreDockerLabels can be built via plain keyword
    construction (i.e. through normal pydantic validation, not
    ``model_construct``) and that the ``cpu_limit`` field round-trips as given.
    """
    labels = StandardSimcoreDockerLabels(
        user_id=8268,
        project_id=UUID("5ea24ce0-0e4d-4ee6-a3f1-e4799752a684"),
        node_id=UUID("c17c6279-23c6-412f-8826-867323a7711a"),
        product_name="osparc",
        simcore_user_agent="oePqmjQbZndJghceKRJR",
        # NOTE: there is currently no need for this label in the comp backend
        swarm_stack_name="UNDEFINED_DOCKER_LABEL",
        memory_limit=ByteSize(23424324),
        cpu_limit=1.0,
    )
    assert labels.cpu_limit == 1.0
Original file line number Diff line number Diff line change
Expand Up @@ -541,7 +541,8 @@ def as_label_data(self) -> str:
# compose_spec needs to be json encoded before encoding it to json
# and storing it in the label
return self.model_copy(
update={"compose_spec": json.dumps(self.compose_spec)}, deep=True
update={"compose_spec": json.dumps(self.compose_spec)},
deep=True,
).model_dump_json()

model_config = ConfigDict(extra="allow", populate_by_name=True)
Original file line number Diff line number Diff line change
Expand Up @@ -128,9 +128,9 @@ def _update_paths_mappings(
)
env_vars["DY_SIDECAR_PATH_INPUTS"] = f"{path_mappings.inputs_path}"
env_vars["DY_SIDECAR_PATH_OUTPUTS"] = f"{path_mappings.outputs_path}"
env_vars[
"DY_SIDECAR_STATE_PATHS"
] = f"{json_dumps( { f'{p}' for p in path_mappings.state_paths } )}"
env_vars["DY_SIDECAR_STATE_PATHS"] = (
f"{json_dumps( { f'{p}' for p in path_mappings.state_paths } )}"
)

service_content["environment"] = _EnvironmentSection.export_as_list(env_vars)

Expand Down Expand Up @@ -241,15 +241,17 @@ def _update_container_labels(
spec_service_key, default_limits
)

label_keys = StandardSimcoreDockerLabels.model_construct(
user_id=user_id,
project_id=project_id,
node_id=node_id,
simcore_user_agent=simcore_user_agent,
product_name=product_name,
swarm_stack_name=swarm_stack_name,
memory_limit=ByteSize(container_limits["memory"]),
cpu_limit=container_limits["cpu"],
label_keys = StandardSimcoreDockerLabels.model_validate(
{
"user_id": user_id,
"project_id": project_id,
"node_id": node_id,
"simcore_user_agent": simcore_user_agent,
"product_name": product_name,
"swarm_stack_name": swarm_stack_name,
"memory_limit": ByteSize(container_limits["memory"]),
"cpu_limit": container_limits["cpu"],
}
)
docker_labels = [
f"{k}={v}" for k, v in label_keys.to_simcore_runtime_docker_labels().items()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -315,17 +315,19 @@ def compute_task_labels(
ValidationError
"""
product_name = run_metadata.get("product_name", UNDEFINED_DOCKER_LABEL)
standard_simcore_labels = StandardSimcoreDockerLabels.model_construct(
user_id=user_id,
project_id=project_id,
node_id=node_id,
product_name=product_name,
simcore_user_agent=run_metadata.get(
"simcore_user_agent", UNDEFINED_DOCKER_LABEL
),
swarm_stack_name=UNDEFINED_DOCKER_LABEL, # NOTE: there is currently no need for this label in the comp backend
memory_limit=node_requirements.ram,
cpu_limit=node_requirements.cpu,
standard_simcore_labels = StandardSimcoreDockerLabels.model_validate(
{
"user_id": user_id,
"project_id": project_id,
"node_id": node_id,
"product_name": product_name,
"simcore_user_agent": run_metadata.get(
"simcore_user_agent", UNDEFINED_DOCKER_LABEL
),
"swarm_stack_name": UNDEFINED_DOCKER_LABEL, # NOTE: there is currently no need for this label in the comp backend
"memory_limit": node_requirements.ram,
"cpu_limit": node_requirements.cpu,
}
).to_simcore_runtime_docker_labels()
return standard_simcore_labels | TypeAdapter(ContainerLabelsDict).validate_python(
{
Expand Down Expand Up @@ -633,7 +635,7 @@ def check_if_cluster_is_able_to_run_pipeline(


async def wrap_client_async_routine(
client_coroutine: Coroutine[Any, Any, Any] | Any | None
client_coroutine: Coroutine[Any, Any, Any] | Any | None,
) -> Any:
"""Dask async behavior does not go well with Pylance as it returns
a union of types. this wrapper makes both mypy and pylance happy"""
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -227,7 +227,7 @@ def test_regression_service_has_no_reservations():
f"{to_simcore_runtime_docker_label_key('user-id')}={USER_ID}",
f"{to_simcore_runtime_docker_label_key('node-id')}={NODE_ID}",
f"{to_simcore_runtime_docker_label_key('swarm-stack-name')}={SWARM_STACK_NAME}",
f"{to_simcore_runtime_docker_label_key('cpu-limit')}=0",
f"{to_simcore_runtime_docker_label_key('cpu-limit')}=0.0",
f"{to_simcore_runtime_docker_label_key('memory-limit')}=0",
]
)
Expand Down
Loading